diff --git a/packages/google-cloud-dataplex/docs/dataplex_v1/content_service.rst b/packages/google-cloud-dataplex/docs/dataplex_v1/content_service.rst new file mode 100644 index 000000000000..ce3774365501 --- /dev/null +++ b/packages/google-cloud-dataplex/docs/dataplex_v1/content_service.rst @@ -0,0 +1,10 @@ +ContentService +-------------------------------- + +.. automodule:: google.cloud.dataplex_v1.services.content_service + :members: + :inherited-members: + +.. automodule:: google.cloud.dataplex_v1.services.content_service.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-dataplex/docs/dataplex_v1/services.rst b/packages/google-cloud-dataplex/docs/dataplex_v1/services.rst index 372077df3123..14f9682dfd04 100644 --- a/packages/google-cloud-dataplex/docs/dataplex_v1/services.rst +++ b/packages/google-cloud-dataplex/docs/dataplex_v1/services.rst @@ -3,5 +3,6 @@ Services for Google Cloud Dataplex v1 API .. toctree:: :maxdepth: 2 + content_service dataplex_service metadata_service diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex/__init__.py b/packages/google-cloud-dataplex/google/cloud/dataplex/__init__.py index 7ccf042f5688..12aef4588dd6 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex/__init__.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex/__init__.py @@ -14,6 +14,12 @@ # limitations under the License. # +from google.cloud.dataplex_v1.services.content_service.client import ( + ContentServiceClient, +) +from google.cloud.dataplex_v1.services.content_service.async_client import ( + ContentServiceAsyncClient, +) from google.cloud.dataplex_v1.services.dataplex_service.client import ( DataplexServiceClient, ) @@ -27,9 +33,22 @@ MetadataServiceAsyncClient, ) +from google.cloud.dataplex_v1.types.analyze import Content +from google.cloud.dataplex_v1.types.analyze import Environment +from google.cloud.dataplex_v1.types.analyze import Session +from google.cloud.dataplex_v1.types.content import CreateContentRequest +from google.cloud.dataplex_v1.types.content import DeleteContentRequest +from google.cloud.dataplex_v1.types.content import GetContentRequest +from google.cloud.dataplex_v1.types.content import ListContentRequest +from google.cloud.dataplex_v1.types.content import ListContentResponse +from google.cloud.dataplex_v1.types.content import UpdateContentRequest from google.cloud.dataplex_v1.types.logs import DiscoveryEvent from google.cloud.dataplex_v1.types.logs import JobEvent from google.cloud.dataplex_v1.types.logs import SessionEvent +from google.cloud.dataplex_v1.types.metadata_ import CreateEntityRequest +from google.cloud.dataplex_v1.types.metadata_ import CreatePartitionRequest +from google.cloud.dataplex_v1.types.metadata_ import DeleteEntityRequest +from google.cloud.dataplex_v1.types.metadata_ import DeletePartitionRequest from google.cloud.dataplex_v1.types.metadata_ import Entity from google.cloud.dataplex_v1.types.metadata_ import GetEntityRequest from google.cloud.dataplex_v1.types.metadata_ import GetPartitionRequest @@ -40,6 +59,7 @@ from google.cloud.dataplex_v1.types.metadata_ import Partition from google.cloud.dataplex_v1.types.metadata_ import Schema from google.cloud.dataplex_v1.types.metadata_ import StorageFormat +from google.cloud.dataplex_v1.types.metadata_ import UpdateEntityRequest from google.cloud.dataplex_v1.types.metadata_ import StorageSystem from google.cloud.dataplex_v1.types.resources import Action from google.cloud.dataplex_v1.types.resources import Asset @@ -49,14 +69,17 @@ from 
google.cloud.dataplex_v1.types.resources import State from google.cloud.dataplex_v1.types.service import CancelJobRequest from google.cloud.dataplex_v1.types.service import CreateAssetRequest +from google.cloud.dataplex_v1.types.service import CreateEnvironmentRequest from google.cloud.dataplex_v1.types.service import CreateLakeRequest from google.cloud.dataplex_v1.types.service import CreateTaskRequest from google.cloud.dataplex_v1.types.service import CreateZoneRequest from google.cloud.dataplex_v1.types.service import DeleteAssetRequest +from google.cloud.dataplex_v1.types.service import DeleteEnvironmentRequest from google.cloud.dataplex_v1.types.service import DeleteLakeRequest from google.cloud.dataplex_v1.types.service import DeleteTaskRequest from google.cloud.dataplex_v1.types.service import DeleteZoneRequest from google.cloud.dataplex_v1.types.service import GetAssetRequest +from google.cloud.dataplex_v1.types.service import GetEnvironmentRequest from google.cloud.dataplex_v1.types.service import GetJobRequest from google.cloud.dataplex_v1.types.service import GetLakeRequest from google.cloud.dataplex_v1.types.service import GetTaskRequest @@ -65,11 +88,15 @@ from google.cloud.dataplex_v1.types.service import ListAssetActionsRequest from google.cloud.dataplex_v1.types.service import ListAssetsRequest from google.cloud.dataplex_v1.types.service import ListAssetsResponse +from google.cloud.dataplex_v1.types.service import ListEnvironmentsRequest +from google.cloud.dataplex_v1.types.service import ListEnvironmentsResponse from google.cloud.dataplex_v1.types.service import ListJobsRequest from google.cloud.dataplex_v1.types.service import ListJobsResponse from google.cloud.dataplex_v1.types.service import ListLakeActionsRequest from google.cloud.dataplex_v1.types.service import ListLakesRequest from google.cloud.dataplex_v1.types.service import ListLakesResponse +from google.cloud.dataplex_v1.types.service import ListSessionsRequest +from google.cloud.dataplex_v1.types.service import ListSessionsResponse from google.cloud.dataplex_v1.types.service import ListTasksRequest from google.cloud.dataplex_v1.types.service import ListTasksResponse from google.cloud.dataplex_v1.types.service import ListZoneActionsRequest @@ -77,6 +104,7 @@ from google.cloud.dataplex_v1.types.service import ListZonesResponse from google.cloud.dataplex_v1.types.service import OperationMetadata from google.cloud.dataplex_v1.types.service import UpdateAssetRequest +from google.cloud.dataplex_v1.types.service import UpdateEnvironmentRequest from google.cloud.dataplex_v1.types.service import UpdateLakeRequest from google.cloud.dataplex_v1.types.service import UpdateTaskRequest from google.cloud.dataplex_v1.types.service import UpdateZoneRequest @@ -84,13 +112,28 @@ from google.cloud.dataplex_v1.types.tasks import Task __all__ = ( + "ContentServiceClient", + "ContentServiceAsyncClient", "DataplexServiceClient", "DataplexServiceAsyncClient", "MetadataServiceClient", "MetadataServiceAsyncClient", + "Content", + "Environment", + "Session", + "CreateContentRequest", + "DeleteContentRequest", + "GetContentRequest", + "ListContentRequest", + "ListContentResponse", + "UpdateContentRequest", "DiscoveryEvent", "JobEvent", "SessionEvent", + "CreateEntityRequest", + "CreatePartitionRequest", + "DeleteEntityRequest", + "DeletePartitionRequest", "Entity", "GetEntityRequest", "GetPartitionRequest", @@ -101,6 +144,7 @@ "Partition", "Schema", "StorageFormat", + "UpdateEntityRequest", "StorageSystem", "Action", "Asset", @@ -110,14 
+154,17 @@ "State", "CancelJobRequest", "CreateAssetRequest", + "CreateEnvironmentRequest", "CreateLakeRequest", "CreateTaskRequest", "CreateZoneRequest", "DeleteAssetRequest", + "DeleteEnvironmentRequest", "DeleteLakeRequest", "DeleteTaskRequest", "DeleteZoneRequest", "GetAssetRequest", + "GetEnvironmentRequest", "GetJobRequest", "GetLakeRequest", "GetTaskRequest", @@ -126,11 +173,15 @@ "ListAssetActionsRequest", "ListAssetsRequest", "ListAssetsResponse", + "ListEnvironmentsRequest", + "ListEnvironmentsResponse", "ListJobsRequest", "ListJobsResponse", "ListLakeActionsRequest", "ListLakesRequest", "ListLakesResponse", + "ListSessionsRequest", + "ListSessionsResponse", "ListTasksRequest", "ListTasksResponse", "ListZoneActionsRequest", @@ -138,6 +189,7 @@ "ListZonesResponse", "OperationMetadata", "UpdateAssetRequest", + "UpdateEnvironmentRequest", "UpdateLakeRequest", "UpdateTaskRequest", "UpdateZoneRequest", diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/__init__.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/__init__.py index 423daa7ee703..06ab200edc13 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/__init__.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/__init__.py @@ -14,14 +14,29 @@ # limitations under the License. # +from .services.content_service import ContentServiceClient +from .services.content_service import ContentServiceAsyncClient from .services.dataplex_service import DataplexServiceClient from .services.dataplex_service import DataplexServiceAsyncClient from .services.metadata_service import MetadataServiceClient from .services.metadata_service import MetadataServiceAsyncClient +from .types.analyze import Content +from .types.analyze import Environment +from .types.analyze import Session +from .types.content import CreateContentRequest +from .types.content import DeleteContentRequest +from .types.content import GetContentRequest +from .types.content import ListContentRequest +from .types.content import ListContentResponse +from .types.content import UpdateContentRequest from .types.logs import DiscoveryEvent from .types.logs import JobEvent from .types.logs import SessionEvent +from .types.metadata_ import CreateEntityRequest +from .types.metadata_ import CreatePartitionRequest +from .types.metadata_ import DeleteEntityRequest +from .types.metadata_ import DeletePartitionRequest from .types.metadata_ import Entity from .types.metadata_ import GetEntityRequest from .types.metadata_ import GetPartitionRequest @@ -32,6 +47,7 @@ from .types.metadata_ import Partition from .types.metadata_ import Schema from .types.metadata_ import StorageFormat +from .types.metadata_ import UpdateEntityRequest from .types.metadata_ import StorageSystem from .types.resources import Action from .types.resources import Asset @@ -41,14 +57,17 @@ from .types.resources import State from .types.service import CancelJobRequest from .types.service import CreateAssetRequest +from .types.service import CreateEnvironmentRequest from .types.service import CreateLakeRequest from .types.service import CreateTaskRequest from .types.service import CreateZoneRequest from .types.service import DeleteAssetRequest +from .types.service import DeleteEnvironmentRequest from .types.service import DeleteLakeRequest from .types.service import DeleteTaskRequest from .types.service import DeleteZoneRequest from .types.service import GetAssetRequest +from .types.service import GetEnvironmentRequest from .types.service import GetJobRequest from 
.types.service import GetLakeRequest from .types.service import GetTaskRequest @@ -57,11 +76,15 @@ from .types.service import ListAssetActionsRequest from .types.service import ListAssetsRequest from .types.service import ListAssetsResponse +from .types.service import ListEnvironmentsRequest +from .types.service import ListEnvironmentsResponse from .types.service import ListJobsRequest from .types.service import ListJobsResponse from .types.service import ListLakeActionsRequest from .types.service import ListLakesRequest from .types.service import ListLakesResponse +from .types.service import ListSessionsRequest +from .types.service import ListSessionsResponse from .types.service import ListTasksRequest from .types.service import ListTasksResponse from .types.service import ListZoneActionsRequest @@ -69,6 +92,7 @@ from .types.service import ListZonesResponse from .types.service import OperationMetadata from .types.service import UpdateAssetRequest +from .types.service import UpdateEnvironmentRequest from .types.service import UpdateLakeRequest from .types.service import UpdateTaskRequest from .types.service import UpdateZoneRequest @@ -76,25 +100,39 @@ from .types.tasks import Task __all__ = ( + "ContentServiceAsyncClient", "DataplexServiceAsyncClient", "MetadataServiceAsyncClient", "Action", "Asset", "AssetStatus", "CancelJobRequest", + "Content", + "ContentServiceClient", "CreateAssetRequest", + "CreateContentRequest", + "CreateEntityRequest", + "CreateEnvironmentRequest", "CreateLakeRequest", + "CreatePartitionRequest", "CreateTaskRequest", "CreateZoneRequest", "DataplexServiceClient", "DeleteAssetRequest", + "DeleteContentRequest", + "DeleteEntityRequest", + "DeleteEnvironmentRequest", "DeleteLakeRequest", + "DeletePartitionRequest", "DeleteTaskRequest", "DeleteZoneRequest", "DiscoveryEvent", "Entity", + "Environment", "GetAssetRequest", + "GetContentRequest", "GetEntityRequest", + "GetEnvironmentRequest", "GetJobRequest", "GetLakeRequest", "GetPartitionRequest", @@ -107,8 +145,12 @@ "ListAssetActionsRequest", "ListAssetsRequest", "ListAssetsResponse", + "ListContentRequest", + "ListContentResponse", "ListEntitiesRequest", "ListEntitiesResponse", + "ListEnvironmentsRequest", + "ListEnvironmentsResponse", "ListJobsRequest", "ListJobsResponse", "ListLakeActionsRequest", @@ -116,6 +158,8 @@ "ListLakesResponse", "ListPartitionsRequest", "ListPartitionsResponse", + "ListSessionsRequest", + "ListSessionsResponse", "ListTasksRequest", "ListTasksResponse", "ListZoneActionsRequest", @@ -125,12 +169,16 @@ "OperationMetadata", "Partition", "Schema", + "Session", "SessionEvent", "State", "StorageFormat", "StorageSystem", "Task", "UpdateAssetRequest", + "UpdateContentRequest", + "UpdateEntityRequest", + "UpdateEnvironmentRequest", "UpdateLakeRequest", "UpdateTaskRequest", "UpdateZoneRequest", diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/gapic_metadata.json b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/gapic_metadata.json index ca75ff399c52..4ff02bf58df5 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/gapic_metadata.json +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/gapic_metadata.json @@ -5,6 +5,70 @@ "protoPackage": "google.cloud.dataplex.v1", "schema": "1.0", "services": { + "ContentService": { + "clients": { + "grpc": { + "libraryClient": "ContentServiceClient", + "rpcs": { + "CreateContent": { + "methods": [ + "create_content" + ] + }, + "DeleteContent": { + "methods": [ + "delete_content" + ] + }, + "GetContent": { + 
"methods": [ + "get_content" + ] + }, + "ListContent": { + "methods": [ + "list_content" + ] + }, + "UpdateContent": { + "methods": [ + "update_content" + ] + } + } + }, + "grpc-async": { + "libraryClient": "ContentServiceAsyncClient", + "rpcs": { + "CreateContent": { + "methods": [ + "create_content" + ] + }, + "DeleteContent": { + "methods": [ + "delete_content" + ] + }, + "GetContent": { + "methods": [ + "get_content" + ] + }, + "ListContent": { + "methods": [ + "list_content" + ] + }, + "UpdateContent": { + "methods": [ + "update_content" + ] + } + } + } + } + }, "DataplexService": { "clients": { "grpc": { @@ -20,6 +84,11 @@ "create_asset" ] }, + "CreateEnvironment": { + "methods": [ + "create_environment" + ] + }, "CreateLake": { "methods": [ "create_lake" @@ -40,6 +109,11 @@ "delete_asset" ] }, + "DeleteEnvironment": { + "methods": [ + "delete_environment" + ] + }, "DeleteLake": { "methods": [ "delete_lake" @@ -60,6 +134,11 @@ "get_asset" ] }, + "GetEnvironment": { + "methods": [ + "get_environment" + ] + }, "GetJob": { "methods": [ "get_job" @@ -90,6 +169,11 @@ "list_assets" ] }, + "ListEnvironments": { + "methods": [ + "list_environments" + ] + }, "ListJobs": { "methods": [ "list_jobs" @@ -105,6 +189,11 @@ "list_lakes" ] }, + "ListSessions": { + "methods": [ + "list_sessions" + ] + }, "ListTasks": { "methods": [ "list_tasks" @@ -125,6 +214,11 @@ "update_asset" ] }, + "UpdateEnvironment": { + "methods": [ + "update_environment" + ] + }, "UpdateLake": { "methods": [ "update_lake" @@ -155,6 +249,11 @@ "create_asset" ] }, + "CreateEnvironment": { + "methods": [ + "create_environment" + ] + }, "CreateLake": { "methods": [ "create_lake" @@ -175,6 +274,11 @@ "delete_asset" ] }, + "DeleteEnvironment": { + "methods": [ + "delete_environment" + ] + }, "DeleteLake": { "methods": [ "delete_lake" @@ -195,6 +299,11 @@ "get_asset" ] }, + "GetEnvironment": { + "methods": [ + "get_environment" + ] + }, "GetJob": { "methods": [ "get_job" @@ -225,6 +334,11 @@ "list_assets" ] }, + "ListEnvironments": { + "methods": [ + "list_environments" + ] + }, "ListJobs": { "methods": [ "list_jobs" @@ -240,6 +354,11 @@ "list_lakes" ] }, + "ListSessions": { + "methods": [ + "list_sessions" + ] + }, "ListTasks": { "methods": [ "list_tasks" @@ -260,6 +379,11 @@ "update_asset" ] }, + "UpdateEnvironment": { + "methods": [ + "update_environment" + ] + }, "UpdateLake": { "methods": [ "update_lake" @@ -284,6 +408,26 @@ "grpc": { "libraryClient": "MetadataServiceClient", "rpcs": { + "CreateEntity": { + "methods": [ + "create_entity" + ] + }, + "CreatePartition": { + "methods": [ + "create_partition" + ] + }, + "DeleteEntity": { + "methods": [ + "delete_entity" + ] + }, + "DeletePartition": { + "methods": [ + "delete_partition" + ] + }, "GetEntity": { "methods": [ "get_entity" @@ -303,12 +447,37 @@ "methods": [ "list_partitions" ] + }, + "UpdateEntity": { + "methods": [ + "update_entity" + ] } } }, "grpc-async": { "libraryClient": "MetadataServiceAsyncClient", "rpcs": { + "CreateEntity": { + "methods": [ + "create_entity" + ] + }, + "CreatePartition": { + "methods": [ + "create_partition" + ] + }, + "DeleteEntity": { + "methods": [ + "delete_entity" + ] + }, + "DeletePartition": { + "methods": [ + "delete_partition" + ] + }, "GetEntity": { "methods": [ "get_entity" @@ -328,6 +497,11 @@ "methods": [ "list_partitions" ] + }, + "UpdateEntity": { + "methods": [ + "update_entity" + ] } } } diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/__init__.py 
b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/__init__.py new file mode 100644 index 000000000000..e4c189237411 --- /dev/null +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import ContentServiceClient +from .async_client import ContentServiceAsyncClient + +__all__ = ( + "ContentServiceClient", + "ContentServiceAsyncClient", +) diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/async_client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/async_client.py new file mode 100644 index 000000000000..ebbc180f7da7 --- /dev/null +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/async_client.py @@ -0,0 +1,700 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import Dict, Optional, Sequence, Tuple, Type, Union +import pkg_resources + +from google.api_core.client_options import ClientOptions +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.cloud.dataplex_v1.services.content_service import pagers +from google.cloud.dataplex_v1.types import analyze +from google.cloud.dataplex_v1.types import content +from google.cloud.dataplex_v1.types import content as gcd_content +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import ContentServiceTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import ContentServiceGrpcAsyncIOTransport +from .client import ContentServiceClient + + +class ContentServiceAsyncClient: + """ContentService manages Notebook and SQL Scripts for Dataplex.""" + + _client: ContentServiceClient + + DEFAULT_ENDPOINT = ContentServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = ContentServiceClient.DEFAULT_MTLS_ENDPOINT + + content_path = staticmethod(ContentServiceClient.content_path) + parse_content_path = staticmethod(ContentServiceClient.parse_content_path) + lake_path = staticmethod(ContentServiceClient.lake_path) + parse_lake_path = staticmethod(ContentServiceClient.parse_lake_path) + common_billing_account_path = staticmethod( + ContentServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + ContentServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(ContentServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + ContentServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + ContentServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + ContentServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod(ContentServiceClient.common_project_path) + parse_common_project_path = staticmethod( + ContentServiceClient.parse_common_project_path + ) + common_location_path = staticmethod(ContentServiceClient.common_location_path) + parse_common_location_path = staticmethod( + ContentServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ContentServiceAsyncClient: The constructed client. + """ + return ContentServiceClient.from_service_account_info.__func__(ContentServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. 
+ args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ContentServiceAsyncClient: The constructed client. + """ + return ContentServiceClient.from_service_account_file.__func__(ContentServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` is provided, use the provided one. + (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return ContentServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> ContentServiceTransport: + """Returns the transport used by the client instance. + + Returns: + ContentServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial( + type(ContentServiceClient).get_transport_class, type(ContentServiceClient) + ) + + def __init__( + self, + *, + credentials: ga_credentials.Credentials = None, + transport: Union[str, ContentServiceTransport] = "grpc_asyncio", + client_options: ClientOptions = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the content service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.ContentServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client.
GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = ContentServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def create_content( + self, + request: Union[gcd_content.CreateContentRequest, dict] = None, + *, + parent: str = None, + content: analyze.Content = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> analyze.Content: + r"""Create a content. + + .. code-block:: + + from google.cloud import dataplex_v1 + + def sample_create_content(): + # Create a client + client = dataplex_v1.ContentServiceClient() + + # Initialize request argument(s) + content = dataplex_v1.Content() + content.data_text = "data_text_value" + content.sql_script.engine = "SPARK" + content.path = "path_value" + + request = dataplex_v1.CreateContentRequest( + parent="parent_value", + content=content, + ) + + # Make the request + response = client.create_content(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.CreateContentRequest, dict]): + The request object. Create content request. + parent (:class:`str`): + Required. The resource name of the parent lake: + projects/{project_id}/locations/{location_id}/lakes/{lake_id} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + content (:class:`google.cloud.dataplex_v1.types.Content`): + Required. Content resource. + This corresponds to the ``content`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.types.Content: + Content represents a user-visible + notebook or a sql script + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, content]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = gcd_content.CreateContentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these.
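+ # A minimal usage sketch, not part of the generated diff (resource names + # below are hypothetical): with the flattened arguments, the assignments + # that follow make + # await client.create_content( + # parent="projects/my-project/locations/us-central1/lakes/my-lake", + # content=content, + # ) + # equivalent to passing a fully-populated CreateContentRequest.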
+ if parent is not None: + request.parent = parent + if content is not None: + request.content = content + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_content, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def update_content( + self, + request: Union[gcd_content.UpdateContentRequest, dict] = None, + *, + content: analyze.Content = None, + update_mask: field_mask_pb2.FieldMask = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> analyze.Content: + r"""Update a content. Only supports full resource update. + + .. code-block:: + + from google.cloud import dataplex_v1 + + def sample_update_content(): + # Create a client + client = dataplex_v1.ContentServiceClient() + + # Initialize request argument(s) + content = dataplex_v1.Content() + content.data_text = "data_text_value" + content.sql_script.engine = "SPARK" + content.path = "path_value" + + request = dataplex_v1.UpdateContentRequest( + content=content, + ) + + # Make the request + response = client.update_content(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.UpdateContentRequest, dict]): + The request object. Update content request. + content (:class:`google.cloud.dataplex_v1.types.Content`): + Required. Update description. Only fields specified in + ``update_mask`` are updated. + + This corresponds to the ``content`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. Mask of fields to update. + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.types.Content: + Content represents a user-visible + notebook or a sql script + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([content, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = gcd_content.UpdateContentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if content is not None: + request.content = content + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
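+ # A minimal sketch of building the mask applied above, not part of the + # generated diff (the field path is hypothetical): + # update_mask = field_mask_pb2.FieldMask(paths=["description"]) + # Note that UpdateContent only supports full resource update, so the + # `content` argument should carry the complete resource even when the + # mask names only a few fields.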
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_content, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("content.name", request.content.name),) + ), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def delete_content( + self, + request: Union[content.DeleteContentRequest, dict] = None, + *, + name: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Delete a content. + + .. code-block:: + + from google.cloud import dataplex_v1 + + def sample_delete_content(): + # Create a client + client = dataplex_v1.ContentServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteContentRequest( + name="name_value", + ) + + # Make the request + client.delete_content(request=request) + + Args: + request (Union[google.cloud.dataplex_v1.types.DeleteContentRequest, dict]): + The request object. Delete content request. + name (:class:`str`): + Required. The resource name of the content: + projects/{project_id}/locations/{location_id}/lakes/{lake_id}/content/{content_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = content.DeleteContentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_content, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + async def get_content( + self, + request: Union[content.GetContentRequest, dict] = None, + *, + name: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> analyze.Content: + r"""Get a content resource. + + .. 
code-block:: + + from google.cloud import dataplex_v1 + + def sample_get_content(): + # Create a client + client = dataplex_v1.ContentServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.GetContentRequest( + name="name_value", + ) + + # Make the request + response = client.get_content(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.GetContentRequest, dict]): + The request object. Get content request. + name (:class:`str`): + Required. The resource name of the content: + projects/{project_id}/locations/{location_id}/lakes/{lake_id}/content/{content_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.types.Content: + Content represents a user-visible + notebook or a sql script + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = content.GetContentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_content, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def list_content( + self, + request: Union[content.ListContentRequest, dict] = None, + *, + parent: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListContentAsyncPager: + r"""List content. + + .. code-block:: + + from google.cloud import dataplex_v1 + + def sample_list_content(): + # Create a client + client = dataplex_v1.ContentServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListContentRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_content(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.ListContentRequest, dict]): + The request object. List content request. Returns the + BASIC Content view. + parent (:class:`str`): + Required. The resource name of the parent lake: + projects/{project_id}/locations/{location_id}/lakes/{lake_id} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.services.content_service.pagers.ListContentAsyncPager: + List content response. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = content.ListContentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_content, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListContentAsyncPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution("google-cloud-dataplex",).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ("ContentServiceAsyncClient",) diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/client.py new file mode 100644 index 000000000000..073e5f4a05ad --- /dev/null +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/client.py @@ -0,0 +1,923 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import Dict, Optional, Sequence, Tuple, Type, Union +import pkg_resources + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.cloud.dataplex_v1.services.content_service import pagers +from google.cloud.dataplex_v1.types import analyze +from google.cloud.dataplex_v1.types import content +from google.cloud.dataplex_v1.types import content as gcd_content +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import ContentServiceTransport, DEFAULT_CLIENT_INFO +from .transports.grpc import ContentServiceGrpcTransport +from .transports.grpc_asyncio import ContentServiceGrpcAsyncIOTransport + + +class ContentServiceClientMeta(type): + """Metaclass for the ContentService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[ContentServiceTransport]] + _transport_registry["grpc"] = ContentServiceGrpcTransport + _transport_registry["grpc_asyncio"] = ContentServiceGrpcAsyncIOTransport + + def get_transport_class(cls, label: str = None,) -> Type[ContentServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class ContentServiceClient(metaclass=ContentServiceClientMeta): + """ContentService manages Notebook and SQL Scripts for Dataplex.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "dataplex.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ContentServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ContentServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> ContentServiceTransport: + """Returns the transport used by the client instance. + + Returns: + ContentServiceTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def content_path(project: str, location: str, lake: str, content: str,) -> str: + """Returns a fully-qualified content string.""" + return "projects/{project}/locations/{location}/lakes/{lake}/content/{content}".format( + project=project, location=location, lake=lake, content=content, + ) + + @staticmethod + def parse_content_path(path: str) -> Dict[str, str]: + """Parses a content path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/lakes/(?P.+?)/content/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def lake_path(project: str, location: str, lake: str,) -> str: + """Returns a fully-qualified lake string.""" + return "projects/{project}/locations/{location}/lakes/{lake}".format( + project=project, location=location, lake=lake, + ) + + @staticmethod + def parse_lake_path(path: str) -> Dict[str, str]: + """Parses a lake path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/lakes/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path(billing_account: str,) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str,) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder,) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str,) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization,) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str,) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project,) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str,) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. 
+ + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` is provided, use the provided one. + (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, ContentServiceTransport, None] = None, + client_options: Optional[client_options_lib.ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the content service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ContentServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. It won't take effect if a ``transport`` instance is provided.
+ (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options + ) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, ContentServiceTransport): + # transport is a ContentServiceTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + ) + + def create_content( + self, + request: Union[gcd_content.CreateContentRequest, dict] = None, + *, + parent: str = None, + content: analyze.Content = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> analyze.Content: + r"""Create a content. + + + .. 
code-block:: + + from google.cloud import dataplex_v1 + + def sample_create_content(): + # Create a client + client = dataplex_v1.ContentServiceClient() + + # Initialize request argument(s) + content = dataplex_v1.Content() + content.data_text = "data_text_value" + content.sql_script.engine = "SPARK" + content.path = "path_value" + + request = dataplex_v1.CreateContentRequest( + parent="parent_value", + content=content, + ) + + # Make the request + response = client.create_content(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.CreateContentRequest, dict]): + The request object. Create content request. + parent (str): + Required. The resource name of the parent lake: + projects/{project_id}/locations/{location_id}/lakes/{lake_id} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + content (google.cloud.dataplex_v1.types.Content): + Required. Content resource. + This corresponds to the ``content`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.types.Content: + Content represents a user-visible + notebook or a sql script + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, content]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a gcd_content.CreateContentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, gcd_content.CreateContentRequest): + request = gcd_content.CreateContentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if content is not None: + request.content = content + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_content] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def update_content( + self, + request: Union[gcd_content.UpdateContentRequest, dict] = None, + *, + content: analyze.Content = None, + update_mask: field_mask_pb2.FieldMask = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> analyze.Content: + r"""Update a content. Only supports full resource update. + + + .. 
code-block:: + + from google.cloud import dataplex_v1 + + def sample_update_content(): + # Create a client + client = dataplex_v1.ContentServiceClient() + + # Initialize request argument(s) + content = dataplex_v1.Content() + content.data_text = "data_text_value" + content.sql_script.engine = "SPARK" + content.path = "path_value" + + request = dataplex_v1.UpdateContentRequest( + content=content, + ) + + # Make the request + response = client.update_content(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.UpdateContentRequest, dict]): + The request object. Update content request. + content (google.cloud.dataplex_v1.types.Content): + Required. Update description. Only fields specified in + ``update_mask`` are updated. + + This corresponds to the ``content`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Mask of fields to update. + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.types.Content: + Content represents a user-visible + notebook or a sql script + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([content, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a gcd_content.UpdateContentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, gcd_content.UpdateContentRequest): + request = gcd_content.UpdateContentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if content is not None: + request.content = content + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_content] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("content.name", request.content.name),) + ), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def delete_content( + self, + request: Union[content.DeleteContentRequest, dict] = None, + *, + name: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Delete a content. + + + .. 
code-block:: + + from google.cloud import dataplex_v1 + + def sample_delete_content(): + # Create a client + client = dataplex_v1.ContentServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteContentRequest( + name="name_value", + ) + + # Make the request + client.delete_content(request=request) + + Args: + request (Union[google.cloud.dataplex_v1.types.DeleteContentRequest, dict]): + The request object. Delete content request. + name (str): + Required. The resource name of the content: + projects/{project_id}/locations/{location_id}/lakes/{lake_id}/content/{content_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a content.DeleteContentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, content.DeleteContentRequest): + request = content.DeleteContentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_content] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + def get_content( + self, + request: Union[content.GetContentRequest, dict] = None, + *, + name: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> analyze.Content: + r"""Get a content resource. + + + .. code-block:: + + from google.cloud import dataplex_v1 + + def sample_get_content(): + # Create a client + client = dataplex_v1.ContentServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.GetContentRequest( + name="name_value", + ) + + # Make the request + response = client.get_content(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.GetContentRequest, dict]): + The request object. Get content request. + name (str): + Required. The resource name of the content: + projects/{project_id}/locations/{location_id}/lakes/{lake_id}/content/{content_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.types.Content: + Content represents a user-visible + notebook or a sql script + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a content.GetContentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, content.GetContentRequest): + request = content.GetContentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_content] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def list_content( + self, + request: Union[content.ListContentRequest, dict] = None, + *, + parent: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListContentPager: + r"""List content. + + + .. code-block:: + + from google.cloud import dataplex_v1 + + def sample_list_content(): + # Create a client + client = dataplex_v1.ContentServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListContentRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_content(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.ListContentRequest, dict]): + The request object. List content request. Returns the + BASIC Content view. + parent (str): + Required. The resource name of the parent lake: + projects/{project_id}/locations/{location_id}/lakes/{lake_id} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.services.content_service.pagers.ListContentPager: + List content response. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
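+        # For example (resource name hypothetical), each of these calls is
+        # valid on its own, but combining ``request=`` with ``parent=``
+        # raises ValueError:
+        #   client.list_content(parent="projects/p/locations/l/lakes/my-lake")
+        #   client.list_content(request={"parent": "projects/p/locations/l/lakes/my-lake"})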
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a content.ListContentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, content.ListContentRequest): + request = content.ListContentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_content] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListContentPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution("google-cloud-dataplex",).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ("ContentServiceClient",) diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/pagers.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/pagers.py new file mode 100644 index 000000000000..a1090650bb60 --- /dev/null +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/pagers.py @@ -0,0 +1,156 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Sequence, + Tuple, + Optional, + Iterator, +) + +from google.cloud.dataplex_v1.types import analyze +from google.cloud.dataplex_v1.types import content + + +class ListContentPager: + """A pager for iterating through ``list_content`` requests. 
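+
+    For example, a plain loop drives the pager (the ``parent`` value below is
+    hypothetical, and credentials are assumed to come from the environment):
+
+    .. code-block::
+
+        from google.cloud import dataplex_v1
+
+        client = dataplex_v1.ContentServiceClient()
+        for item in client.list_content(parent="projects/p/locations/l/lakes/my-lake"):
+            print(item)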
+ + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListContentResponse` object, and + provides an ``__iter__`` method to iterate through its + ``content`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListContent`` requests and continue to iterate + through the ``content`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListContentResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., content.ListContentResponse], + request: content.ListContentRequest, + response: content.ListContentResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListContentRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListContentResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = content.ListContentRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[content.ListContentResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[analyze.Content]: + for page in self.pages: + yield from page.content + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListContentAsyncPager: + """A pager for iterating through ``list_content`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListContentResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``content`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListContent`` requests and continue to iterate + through the ``content`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListContentResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[content.ListContentResponse]], + request: content.ListContentRequest, + response: content.ListContentResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListContentRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListContentResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = content.ListContentRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[content.ListContentResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[analyze.Content]: + async def async_generator(): + async for page in self.pages: + for response in page.content: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/transports/__init__.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/transports/__init__.py new file mode 100644 index 000000000000..4dc342fc317c --- /dev/null +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/transports/__init__.py @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import ContentServiceTransport +from .grpc import ContentServiceGrpcTransport +from .grpc_asyncio import ContentServiceGrpcAsyncIOTransport + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[ContentServiceTransport]] +_transport_registry["grpc"] = ContentServiceGrpcTransport +_transport_registry["grpc_asyncio"] = ContentServiceGrpcAsyncIOTransport + +__all__ = ( + "ContentServiceTransport", + "ContentServiceGrpcTransport", + "ContentServiceGrpcAsyncIOTransport", +) diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/transports/base.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/transports/base.py new file mode 100644 index 000000000000..1c6eaa04bc67 --- /dev/null +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/transports/base.py @@ -0,0 +1,195 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union +import pkg_resources + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.dataplex_v1.types import analyze +from google.cloud.dataplex_v1.types import content +from google.cloud.dataplex_v1.types import content as gcd_content +from google.protobuf import empty_pb2 # type: ignore + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution("google-cloud-dataplex",).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +class ContentServiceTransport(abc.ABC): + """Abstract transport class for ContentService.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "dataplex.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. 
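+        # A self-signed JWT lets the client mint bearer tokens locally with the
+        # service account key instead of exchanging them at the OAuth token
+        # endpoint; the hasattr() guard below keeps this compatible with older
+        # google-auth releases that lack with_always_use_jwt_access().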
+ if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.create_content: gapic_v1.method.wrap_method( + self.create_content, default_timeout=None, client_info=client_info, + ), + self.update_content: gapic_v1.method.wrap_method( + self.update_content, default_timeout=None, client_info=client_info, + ), + self.delete_content: gapic_v1.method.wrap_method( + self.delete_content, default_timeout=None, client_info=client_info, + ), + self.get_content: gapic_v1.method.wrap_method( + self.get_content, default_timeout=None, client_info=client_info, + ), + self.list_content: gapic_v1.method.wrap_method( + self.list_content, default_timeout=None, client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def create_content( + self, + ) -> Callable[ + [gcd_content.CreateContentRequest], + Union[analyze.Content, Awaitable[analyze.Content]], + ]: + raise NotImplementedError() + + @property + def update_content( + self, + ) -> Callable[ + [gcd_content.UpdateContentRequest], + Union[analyze.Content, Awaitable[analyze.Content]], + ]: + raise NotImplementedError() + + @property + def delete_content( + self, + ) -> Callable[ + [content.DeleteContentRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def get_content( + self, + ) -> Callable[ + [content.GetContentRequest], Union[analyze.Content, Awaitable[analyze.Content]] + ]: + raise NotImplementedError() + + @property + def list_content( + self, + ) -> Callable[ + [content.ListContentRequest], + Union[content.ListContentResponse, Awaitable[content.ListContentResponse]], + ]: + raise NotImplementedError() + + +__all__ = ("ContentServiceTransport",) diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/transports/grpc.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/transports/grpc.py new file mode 100644 index 000000000000..0bedb443f07f --- /dev/null +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/transports/grpc.py @@ -0,0 +1,366 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+import warnings
+from typing import Callable, Dict, Optional, Sequence, Tuple, Union
+
+from google.api_core import grpc_helpers
+from google.api_core import gapic_v1
+import google.auth  # type: ignore
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+
+import grpc  # type: ignore
+
+from google.cloud.dataplex_v1.types import analyze
+from google.cloud.dataplex_v1.types import content
+from google.cloud.dataplex_v1.types import content as gcd_content
+from google.protobuf import empty_pb2  # type: ignore
+from .base import ContentServiceTransport, DEFAULT_CLIENT_INFO
+
+
+class ContentServiceGrpcTransport(ContentServiceTransport):
+    """gRPC backend transport for ContentService.
+
+    ContentService manages notebooks and SQL scripts for Dataplex.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+
+    _stubs: Dict[str, Callable]
+
+    def __init__(
+        self,
+        *,
+        host: str = "dataplex.googleapis.com",
+        credentials: ga_credentials.Credentials = None,
+        credentials_file: str = None,
+        scopes: Sequence[str] = None,
+        channel: grpc.Channel = None,
+        api_mtls_endpoint: str = None,
+        client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
+        ssl_channel_credentials: grpc.ChannelCredentials = None,
+        client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                 The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if ``channel`` is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+                ignored if ``channel`` is provided.
+            channel (Optional[grpc.Channel]): A ``Channel`` instance through
+                which to make calls.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the grpc channel. It is ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                A callback to provide client certificate bytes and private key bytes,
+                both in PEM format. It is used to configure a mutual TLS channel. It is
+                ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
+ quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "dataplex.googleapis.com", + credentials: ga_credentials.Credentials = None, + credentials_file: str = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. 
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            grpc.Channel: A gRPC channel object.
+
+        Raises:
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+
+        return grpc_helpers.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs,
+        )
+
+    @property
+    def grpc_channel(self) -> grpc.Channel:
+        """Return the channel designed to connect to this service.
+        """
+        return self._grpc_channel
+
+    @property
+    def create_content(
+        self,
+    ) -> Callable[[gcd_content.CreateContentRequest], analyze.Content]:
+        r"""Return a callable for the create content method over gRPC.
+
+        Create a content.
+
+        Returns:
+            Callable[[~.CreateContentRequest],
+                    ~.Content]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "create_content" not in self._stubs:
+            self._stubs["create_content"] = self.grpc_channel.unary_unary(
+                "/google.cloud.dataplex.v1.ContentService/CreateContent",
+                request_serializer=gcd_content.CreateContentRequest.serialize,
+                response_deserializer=analyze.Content.deserialize,
+            )
+        return self._stubs["create_content"]
+
+    @property
+    def update_content(
+        self,
+    ) -> Callable[[gcd_content.UpdateContentRequest], analyze.Content]:
+        r"""Return a callable for the update content method over gRPC.
+
+        Update a content. Only supports full resource update.
+
+        Returns:
+            Callable[[~.UpdateContentRequest],
+                    ~.Content]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "update_content" not in self._stubs:
+            self._stubs["update_content"] = self.grpc_channel.unary_unary(
+                "/google.cloud.dataplex.v1.ContentService/UpdateContent",
+                request_serializer=gcd_content.UpdateContentRequest.serialize,
+                response_deserializer=analyze.Content.deserialize,
+            )
+        return self._stubs["update_content"]
+
+    @property
+    def delete_content(
+        self,
+    ) -> Callable[[content.DeleteContentRequest], empty_pb2.Empty]:
+        r"""Return a callable for the delete content method over gRPC.
+
+        Delete a content.
+
+        Returns:
+            Callable[[~.DeleteContentRequest],
+                    ~.Empty]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
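+        # The stub is built lazily on first access and memoized in
+        # self._stubs, so repeated property lookups reuse the same
+        # channel-bound callable.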
+ if "delete_content" not in self._stubs: + self._stubs["delete_content"] = self.grpc_channel.unary_unary( + "/google.cloud.dataplex.v1.ContentService/DeleteContent", + request_serializer=content.DeleteContentRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_content"] + + @property + def get_content(self) -> Callable[[content.GetContentRequest], analyze.Content]: + r"""Return a callable for the get content method over gRPC. + + Get a content resource. + + Returns: + Callable[[~.GetContentRequest], + ~.Content]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_content" not in self._stubs: + self._stubs["get_content"] = self.grpc_channel.unary_unary( + "/google.cloud.dataplex.v1.ContentService/GetContent", + request_serializer=content.GetContentRequest.serialize, + response_deserializer=analyze.Content.deserialize, + ) + return self._stubs["get_content"] + + @property + def list_content( + self, + ) -> Callable[[content.ListContentRequest], content.ListContentResponse]: + r"""Return a callable for the list content method over gRPC. + + List content. + + Returns: + Callable[[~.ListContentRequest], + ~.ListContentResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_content" not in self._stubs: + self._stubs["list_content"] = self.grpc_channel.unary_unary( + "/google.cloud.dataplex.v1.ContentService/ListContent", + request_serializer=content.ListContentRequest.serialize, + response_deserializer=content.ListContentResponse.deserialize, + ) + return self._stubs["list_content"] + + def close(self): + self.grpc_channel.close() + + +__all__ = ("ContentServiceGrpcTransport",) diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/transports/grpc_asyncio.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..37060e7dd9cc --- /dev/null +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/transports/grpc_asyncio.py @@ -0,0 +1,370 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+import warnings
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
+
+from google.api_core import gapic_v1
+from google.api_core import grpc_helpers_async
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+
+import grpc  # type: ignore
+from grpc.experimental import aio  # type: ignore
+
+from google.cloud.dataplex_v1.types import analyze
+from google.cloud.dataplex_v1.types import content
+from google.cloud.dataplex_v1.types import content as gcd_content
+from google.protobuf import empty_pb2  # type: ignore
+from .base import ContentServiceTransport, DEFAULT_CLIENT_INFO
+from .grpc import ContentServiceGrpcTransport
+
+
+class ContentServiceGrpcAsyncIOTransport(ContentServiceTransport):
+    """gRPC AsyncIO backend transport for ContentService.
+
+    ContentService manages notebooks and SQL scripts for Dataplex.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+
+    _grpc_channel: aio.Channel
+    _stubs: Dict[str, Callable] = {}
+
+    @classmethod
+    def create_channel(
+        cls,
+        host: str = "dataplex.googleapis.com",
+        credentials: ga_credentials.Credentials = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        quota_project_id: Optional[str] = None,
+        **kwargs,
+    ) -> aio.Channel:
+        """Create and return a gRPC AsyncIO channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            aio.Channel: A gRPC AsyncIO channel object.
+        """
+
+        return grpc_helpers_async.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs,
+        )
+
+    def __init__(
+        self,
+        *,
+        host: str = "dataplex.googleapis.com",
+        credentials: ga_credentials.Credentials = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        channel: aio.Channel = None,
+        api_mtls_endpoint: str = None,
+        client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
+        ssl_channel_credentials: grpc.ChannelCredentials = None,
+        client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
+        quota_project_id=None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+    ) -> None:
+        """Instantiate the transport.
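+
+        All method callables produced by this transport return awaitables
+        and must be driven from a running asyncio event loop.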
+
+        Args:
+            host (Optional[str]):
+                 The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if ``channel`` is provided.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            channel (Optional[aio.Channel]): A ``Channel`` instance through
+                which to make calls.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the grpc channel. It is ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                A callback to provide client certificate bytes and private key bytes,
+                both in PEM format. It is used to configure a mutual TLS channel. It is
+                ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+        self._grpc_channel = None
+        self._ssl_channel_credentials = ssl_channel_credentials
+        self._stubs: Dict[str, Callable] = {}
+
+        if api_mtls_endpoint:
+            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+        if client_cert_source:
+            warnings.warn("client_cert_source is deprecated", DeprecationWarning)
+
+        if channel:
+            # Ignore credentials if a channel was passed.
+            credentials = False
+            # If a channel was explicitly provided, set it.
+            self._grpc_channel = channel
+            self._ssl_channel_credentials = None
+        else:
+            if api_mtls_endpoint:
+                host = api_mtls_endpoint
+
+                # Create SSL credentials with client_cert_source or application
+                # default SSL credentials.
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def create_content( + self, + ) -> Callable[[gcd_content.CreateContentRequest], Awaitable[analyze.Content]]: + r"""Return a callable for the create content method over gRPC. + + Create a content. + + Returns: + Callable[[~.CreateContentRequest], + Awaitable[~.Content]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_content" not in self._stubs: + self._stubs["create_content"] = self.grpc_channel.unary_unary( + "/google.cloud.dataplex.v1.ContentService/CreateContent", + request_serializer=gcd_content.CreateContentRequest.serialize, + response_deserializer=analyze.Content.deserialize, + ) + return self._stubs["create_content"] + + @property + def update_content( + self, + ) -> Callable[[gcd_content.UpdateContentRequest], Awaitable[analyze.Content]]: + r"""Return a callable for the update content method over gRPC. + + Update a content. Only supports full resource update. + + Returns: + Callable[[~.UpdateContentRequest], + Awaitable[~.Content]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_content" not in self._stubs: + self._stubs["update_content"] = self.grpc_channel.unary_unary( + "/google.cloud.dataplex.v1.ContentService/UpdateContent", + request_serializer=gcd_content.UpdateContentRequest.serialize, + response_deserializer=analyze.Content.deserialize, + ) + return self._stubs["update_content"] + + @property + def delete_content( + self, + ) -> Callable[[content.DeleteContentRequest], Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete content method over gRPC. + + Delete a content. + + Returns: + Callable[[~.DeleteContentRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_content" not in self._stubs: + self._stubs["delete_content"] = self.grpc_channel.unary_unary( + "/google.cloud.dataplex.v1.ContentService/DeleteContent", + request_serializer=content.DeleteContentRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_content"] + + @property + def get_content( + self, + ) -> Callable[[content.GetContentRequest], Awaitable[analyze.Content]]: + r"""Return a callable for the get content method over gRPC. + + Get a content resource. + + Returns: + Callable[[~.GetContentRequest], + Awaitable[~.Content]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_content" not in self._stubs: + self._stubs["get_content"] = self.grpc_channel.unary_unary( + "/google.cloud.dataplex.v1.ContentService/GetContent", + request_serializer=content.GetContentRequest.serialize, + response_deserializer=analyze.Content.deserialize, + ) + return self._stubs["get_content"] + + @property + def list_content( + self, + ) -> Callable[[content.ListContentRequest], Awaitable[content.ListContentResponse]]: + r"""Return a callable for the list content method over gRPC. + + List content. + + Returns: + Callable[[~.ListContentRequest], + Awaitable[~.ListContentResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
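+        # Unlike the synchronous transport, invoking this callable returns an
+        # awaitable that resolves to a ListContentResponse.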
+ if "list_content" not in self._stubs: + self._stubs["list_content"] = self.grpc_channel.unary_unary( + "/google.cloud.dataplex.v1.ContentService/ListContent", + request_serializer=content.ListContentRequest.serialize, + response_deserializer=content.ListContentResponse.deserialize, + ) + return self._stubs["list_content"] + + def close(self): + return self.grpc_channel.close() + + +__all__ = ("ContentServiceGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/async_client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/async_client.py index 661d5662adaa..4439227d24fd 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/async_client.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/async_client.py @@ -34,6 +34,7 @@ from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore from google.cloud.dataplex_v1.services.dataplex_service import pagers +from google.cloud.dataplex_v1.types import analyze from google.cloud.dataplex_v1.types import resources from google.cloud.dataplex_v1.types import service from google.cloud.dataplex_v1.types import tasks @@ -63,10 +64,14 @@ class DataplexServiceAsyncClient: parse_action_path = staticmethod(DataplexServiceClient.parse_action_path) asset_path = staticmethod(DataplexServiceClient.asset_path) parse_asset_path = staticmethod(DataplexServiceClient.parse_asset_path) + environment_path = staticmethod(DataplexServiceClient.environment_path) + parse_environment_path = staticmethod(DataplexServiceClient.parse_environment_path) job_path = staticmethod(DataplexServiceClient.job_path) parse_job_path = staticmethod(DataplexServiceClient.parse_job_path) lake_path = staticmethod(DataplexServiceClient.lake_path) parse_lake_path = staticmethod(DataplexServiceClient.parse_lake_path) + session_path = staticmethod(DataplexServiceClient.session_path) + parse_session_path = staticmethod(DataplexServiceClient.parse_session_path) task_path = staticmethod(DataplexServiceClient.task_path) parse_task_path = staticmethod(DataplexServiceClient.parse_task_path) zone_path = staticmethod(DataplexServiceClient.zone_path) @@ -267,8 +272,7 @@ def sample_create_lake(): The request object. Create lake request. parent (:class:`str`): Required. The resource name of the lake location, of the - form: - ``projects/{project_number}/locations/{location_id}`` + form: projects/{project_number}/locations/{location_id} where ``location_id`` refers to a GCP region. This corresponds to the ``parent`` field @@ -3158,6 +3162,657 @@ def sample_cancel_job(): request, retry=retry, timeout=timeout, metadata=metadata, ) + async def create_environment( + self, + request: Union[service.CreateEnvironmentRequest, dict] = None, + *, + parent: str = None, + environment: analyze.Environment = None, + environment_id: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Create an environment resource. + + .. 
code-block:: + + from google.cloud import dataplex_v1 + + def sample_create_environment(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + environment = dataplex_v1.Environment() + environment.infrastructure_spec.os_image.image_version = "image_version_value" + + request = dataplex_v1.CreateEnvironmentRequest( + parent="parent_value", + environment_id="environment_id_value", + environment=environment, + ) + + # Make the request + operation = client.create_environment(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.CreateEnvironmentRequest, dict]): + The request object. Create environment request. + parent (:class:`str`): + Required. The resource name of the parent lake: + projects/{project_id}/locations/{location_id}/lakes/{lake_id} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + environment (:class:`google.cloud.dataplex_v1.types.Environment`): + Required. Environment resource. + This corresponds to the ``environment`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + environment_id (:class:`str`): + Required. Environment identifier. + + - Must contain only lowercase letters, numbers and + hyphens. + - Must start with a letter. + - Must be between 1-63 characters. + - Must end with a number or a letter. + - Must be unique within the lake. + + This corresponds to the ``environment_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dataplex_v1.types.Environment` Environment represents a user-visible compute infrastructure for analytics + within a lake. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, environment, environment_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = service.CreateEnvironmentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if environment is not None: + request.environment = environment + if environment_id is not None: + request.environment_id = environment_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_environment, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. 
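# Editor's note: a hedged end-to-end sketch of calling the async method
# defined here; resource names are placeholders, and the Environment setup
# mirrors the sample in the docstring above.
import asyncio

from google.cloud import dataplex_v1


async def sample_create_environment_async():
    client = dataplex_v1.DataplexServiceAsyncClient()

    environment = dataplex_v1.Environment()
    environment.infrastructure_spec.os_image.image_version = "image_version_value"

    operation = await client.create_environment(
        parent="projects/my-project/locations/us-central1/lakes/my-lake",
        environment=environment,
        environment_id="my-env",
    )
    # Unlike the sync client, the AsyncOperation result is awaited.
    response = await operation.result()
    print(response.name)


# asyncio.run(sample_create_environment_async())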
+ response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + analyze.Environment, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def update_environment( + self, + request: Union[service.UpdateEnvironmentRequest, dict] = None, + *, + environment: analyze.Environment = None, + update_mask: field_mask_pb2.FieldMask = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Update the environment resource. + + .. code-block:: + + from google.cloud import dataplex_v1 + + def sample_update_environment(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + environment = dataplex_v1.Environment() + environment.infrastructure_spec.os_image.image_version = "image_version_value" + + request = dataplex_v1.UpdateEnvironmentRequest( + environment=environment, + ) + + # Make the request + operation = client.update_environment(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.UpdateEnvironmentRequest, dict]): + The request object. Update environment request. + environment (:class:`google.cloud.dataplex_v1.types.Environment`): + Required. Update description. Only fields specified in + ``update_mask`` are updated. + + This corresponds to the ``environment`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. Mask of fields to update. + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dataplex_v1.types.Environment` Environment represents a user-visible compute infrastructure for analytics + within a lake. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([environment, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = service.UpdateEnvironmentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if environment is not None: + request.environment = environment + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
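# Editor's note: a hedged usage sketch for the update method being defined;
# only fields named in update_mask are written, and environment.name must be
# populated because the routing header below is derived from it. Names are
# placeholders.
from google.cloud import dataplex_v1
from google.protobuf import field_mask_pb2


async def sample_update_environment_async():
    client = dataplex_v1.DataplexServiceAsyncClient()

    environment = dataplex_v1.Environment()
    environment.name = (
        "projects/my-project/locations/us-central1"
        "/lakes/my-lake/environments/my-env"
    )
    environment.description = "updated description"

    operation = await client.update_environment(
        environment=environment,
        update_mask=field_mask_pb2.FieldMask(paths=["description"]),
    )
    print(await operation.result())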
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_environment, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("environment.name", request.environment.name),) + ), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + analyze.Environment, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_environment( + self, + request: Union[service.DeleteEnvironmentRequest, dict] = None, + *, + name: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Delete the environment resource. All the child + resources must have been deleted before environment + deletion can be initiated. + + + .. code-block:: + + from google.cloud import dataplex_v1 + + def sample_delete_environment(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteEnvironmentRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_environment(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.DeleteEnvironmentRequest, dict]): + The request object. Delete environment request. + name (:class:`str`): + Required. The resource name of the environment: + projects/{project_id}/locations/{location_id}/lakes/{lake_id}/environments/{environment_id}\` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + The JSON representation for Empty is empty JSON + object {}. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = service.DeleteEnvironmentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
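# Editor's note: deletion is also a long-running operation; the returned
# future resolves to google.protobuf.Empty once the server-side work is done.
# An illustrative call (placeholder resource name):
from google.cloud import dataplex_v1


async def sample_delete_environment_async():
    client = dataplex_v1.DataplexServiceAsyncClient()
    operation = await client.delete_environment(
        name="projects/my-project/locations/us-central1"
        "/lakes/my-lake/environments/my-env"
    )
    # Returns Empty; awaiting surfaces any failure as an exception.
    await operation.result()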
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_environment, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def list_environments( + self, + request: Union[service.ListEnvironmentsRequest, dict] = None, + *, + parent: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListEnvironmentsAsyncPager: + r"""Lists environments under the given lake. + + .. code-block:: + + from google.cloud import dataplex_v1 + + def sample_list_environments(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListEnvironmentsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_environments(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.ListEnvironmentsRequest, dict]): + The request object. List environments request. + parent (:class:`str`): + Required. The resource name of the parent lake: + projects/{project_id}/locations/{location_id}/lakes/{lake_id} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.services.dataplex_service.pagers.ListEnvironmentsAsyncPager: + List environments response. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = service.ListEnvironmentsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_environments, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
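# Editor's note: the routing-header helper used just below folds request
# fields into an ``x-goog-request-params`` metadata entry so the backend can
# route the call; a hedged sketch (exact value encoding may vary by
# google-api-core version):
from google.api_core import gapic_v1

header = gapic_v1.routing_header.to_grpc_metadata(
    (("parent", "projects/my-project/locations/us-central1/lakes/my-lake"),)
)
# header is a ("x-goog-request-params", "parent=<percent-encoded name>") pair
# that gets appended to the per-call metadata.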
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListEnvironmentsAsyncPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_environment( + self, + request: Union[service.GetEnvironmentRequest, dict] = None, + *, + name: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> analyze.Environment: + r"""Get environment resource. + + .. code-block:: + + from google.cloud import dataplex_v1 + + def sample_get_environment(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.GetEnvironmentRequest( + name="name_value", + ) + + # Make the request + response = client.get_environment(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.GetEnvironmentRequest, dict]): + The request object. Get environment request. + name (:class:`str`): + Required. The resource name of the environment: + projects/{project_id}/locations/{location_id}/lakes/{lake_id}/environments/{environment_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.types.Environment: + Environment represents a user-visible + compute infrastructure for analytics + within a lake. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = service.GetEnvironmentRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_environment, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def list_sessions( + self, + request: Union[service.ListSessionsRequest, dict] = None, + *, + parent: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListSessionsAsyncPager: + r"""Lists session resources in an environment. 
+ + .. code-block:: + + from google.cloud import dataplex_v1 + + def sample_list_sessions(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListSessionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_sessions(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.ListSessionsRequest, dict]): + The request object. List sessions request. + parent (:class:`str`): + Required. The resource name of the parent environment: + projects/{project_number}/locations/{location_id}/lakes/{lake_id}/environment/{environment_id} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.services.dataplex_service.pagers.ListSessionsAsyncPager: + List sessions response. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = service.ListSessionsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_sessions, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListSessionsAsyncPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. 
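# Editor's note: an illustrative consumption of the async pager returned
# below; the parent value is a placeholder.
from google.cloud import dataplex_v1


async def sample_list_sessions_async():
    client = dataplex_v1.DataplexServiceAsyncClient()
    pager = await client.list_sessions(
        parent="projects/my-project/locations/us-central1"
        "/lakes/my-lake/environments/my-env"
    )
    # __aiter__ fetches additional pages transparently.
    async for session in pager:
        print(session.name)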
+ return response + async def __aenter__(self): return self diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/client.py index b952e2a7854e..37b31df46c17 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/client.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/client.py @@ -37,6 +37,7 @@ from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore from google.cloud.dataplex_v1.services.dataplex_service import pagers +from google.cloud.dataplex_v1.types import analyze from google.cloud.dataplex_v1.types import resources from google.cloud.dataplex_v1.types import service from google.cloud.dataplex_v1.types import tasks @@ -206,6 +207,24 @@ def parse_asset_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def environment_path( + project: str, location: str, lake: str, environment: str, + ) -> str: + """Returns a fully-qualified environment string.""" + return "projects/{project}/locations/{location}/lakes/{lake}/environments/{environment}".format( + project=project, location=location, lake=lake, environment=environment, + ) + + @staticmethod + def parse_environment_path(path: str) -> Dict[str, str]: + """Parses a environment path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/lakes/(?P.+?)/environments/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def job_path(project: str, location: str, lake: str, task: str, job: str,) -> str: """Returns a fully-qualified job string.""" @@ -238,6 +257,28 @@ def parse_lake_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def session_path( + project: str, location: str, lake: str, environment: str, session: str, + ) -> str: + """Returns a fully-qualified session string.""" + return "projects/{project}/locations/{location}/lakes/{lake}/environments/{environment}/sessions/{session}".format( + project=project, + location=location, + lake=lake, + environment=environment, + session=session, + ) + + @staticmethod + def parse_session_path(path: str) -> Dict[str, str]: + """Parses a session path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/lakes/(?P.+?)/environments/(?P.+?)/sessions/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def task_path(project: str, location: str, lake: str, task: str,) -> str: """Returns a fully-qualified task string.""" @@ -537,8 +578,7 @@ def sample_create_lake(): The request object. Create lake request. parent (str): Required. The resource name of the lake location, of the - form: - ``projects/{project_number}/locations/{location_id}`` + form: projects/{project_number}/locations/{location_id} where ``location_id`` refers to a GCP region. 
This corresponds to the ``parent`` field @@ -3336,6 +3376,663 @@ def sample_cancel_job(): request, retry=retry, timeout=timeout, metadata=metadata, ) + def create_environment( + self, + request: Union[service.CreateEnvironmentRequest, dict] = None, + *, + parent: str = None, + environment: analyze.Environment = None, + environment_id: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Create an environment resource. + + + .. code-block:: + + from google.cloud import dataplex_v1 + + def sample_create_environment(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + environment = dataplex_v1.Environment() + environment.infrastructure_spec.os_image.image_version = "image_version_value" + + request = dataplex_v1.CreateEnvironmentRequest( + parent="parent_value", + environment_id="environment_id_value", + environment=environment, + ) + + # Make the request + operation = client.create_environment(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.CreateEnvironmentRequest, dict]): + The request object. Create environment request. + parent (str): + Required. The resource name of the parent lake: + projects/{project_id}/locations/{location_id}/lakes/{lake_id} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + environment (google.cloud.dataplex_v1.types.Environment): + Required. Environment resource. + This corresponds to the ``environment`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + environment_id (str): + Required. Environment identifier. + + - Must contain only lowercase letters, numbers and + hyphens. + - Must start with a letter. + - Must be between 1-63 characters. + - Must end with a number or a letter. + - Must be unique within the lake. + + This corresponds to the ``environment_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dataplex_v1.types.Environment` Environment represents a user-visible compute infrastructure for analytics + within a lake. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, environment, environment_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.CreateEnvironmentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
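# Editor's note: unlike the async client, the sync client below avoids
# re-copying a request that is already the right proto type (the isinstance
# check that follows). The environment_path/parse_environment_path helpers
# added earlier in this file are convenient for building flattened arguments;
# an illustrative round trip with placeholder IDs:
from google.cloud import dataplex_v1

path = dataplex_v1.DataplexServiceClient.environment_path(
    "my-project", "us-central1", "my-lake", "my-env"
)
# "projects/my-project/locations/us-central1/lakes/my-lake/environments/my-env"
assert dataplex_v1.DataplexServiceClient.parse_environment_path(path) == {
    "project": "my-project",
    "location": "us-central1",
    "lake": "my-lake",
    "environment": "my-env",
}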
+ if not isinstance(request, service.CreateEnvironmentRequest): + request = service.CreateEnvironmentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if environment is not None: + request.environment = environment + if environment_id is not None: + request.environment_id = environment_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_environment] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + analyze.Environment, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def update_environment( + self, + request: Union[service.UpdateEnvironmentRequest, dict] = None, + *, + environment: analyze.Environment = None, + update_mask: field_mask_pb2.FieldMask = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Update the environment resource. + + + .. code-block:: + + from google.cloud import dataplex_v1 + + def sample_update_environment(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + environment = dataplex_v1.Environment() + environment.infrastructure_spec.os_image.image_version = "image_version_value" + + request = dataplex_v1.UpdateEnvironmentRequest( + environment=environment, + ) + + # Make the request + operation = client.update_environment(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.UpdateEnvironmentRequest, dict]): + The request object. Update environment request. + environment (google.cloud.dataplex_v1.types.Environment): + Required. Update description. Only fields specified in + ``update_mask`` are updated. + + This corresponds to the ``environment`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Mask of fields to update. + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dataplex_v1.types.Environment` Environment represents a user-visible compute infrastructure for analytics + within a lake. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
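# Editor's note: the guard below makes ``request`` and the flattened keyword
# arguments mutually exclusive; passing both raises before any RPC is sent.
# Illustrative behaviour (assumes default credentials for client creation):
from google.cloud import dataplex_v1

client = dataplex_v1.DataplexServiceClient()
env = dataplex_v1.Environment()
try:
    client.update_environment(
        request=dataplex_v1.UpdateEnvironmentRequest(environment=env),
        environment=env,  # setting the flattened field as well is rejected
    )
except ValueError:
    pass  # "If the `request` argument is set, then none of ..."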
+ has_flattened_params = any([environment, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.UpdateEnvironmentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.UpdateEnvironmentRequest): + request = service.UpdateEnvironmentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if environment is not None: + request.environment = environment + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_environment] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("environment.name", request.environment.name),) + ), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + analyze.Environment, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_environment( + self, + request: Union[service.DeleteEnvironmentRequest, dict] = None, + *, + name: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Delete the environment resource. All the child + resources must have been deleted before environment + deletion can be initiated. + + + + .. code-block:: + + from google.cloud import dataplex_v1 + + def sample_delete_environment(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteEnvironmentRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_environment(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.DeleteEnvironmentRequest, dict]): + The request object. Delete environment request. + name (str): + Required. The resource name of the environment: + projects/{project_id}/locations/{location_id}/lakes/{lake_id}/environments/{environment_id}\` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. 
For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + The JSON representation for Empty is empty JSON + object {}. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.DeleteEnvironmentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.DeleteEnvironmentRequest): + request = service.DeleteEnvironmentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_environment] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def list_environments( + self, + request: Union[service.ListEnvironmentsRequest, dict] = None, + *, + parent: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListEnvironmentsPager: + r"""Lists environments under the given lake. + + + .. code-block:: + + from google.cloud import dataplex_v1 + + def sample_list_environments(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListEnvironmentsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_environments(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.ListEnvironmentsRequest, dict]): + The request object. List environments request. + parent (str): + Required. The resource name of the parent lake: + projects/{project_id}/locations/{location_id}/lakes/{lake_id} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.services.dataplex_service.pagers.ListEnvironmentsPager: + List environments response. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. 
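# Editor's note: an illustrative use of the sync pager this method returns;
# iterating the pager (or its .pages property) issues further
# ListEnvironments calls as needed. The parent value is a placeholder.
from google.cloud import dataplex_v1

client = dataplex_v1.DataplexServiceClient()
for environment in client.list_environments(
    parent="projects/my-project/locations/us-central1/lakes/my-lake"
):
    print(environment.name)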
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.ListEnvironmentsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.ListEnvironmentsRequest): + request = service.ListEnvironmentsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_environments] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListEnvironmentsPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + def get_environment( + self, + request: Union[service.GetEnvironmentRequest, dict] = None, + *, + name: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> analyze.Environment: + r"""Get environment resource. + + + .. code-block:: + + from google.cloud import dataplex_v1 + + def sample_get_environment(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.GetEnvironmentRequest( + name="name_value", + ) + + # Make the request + response = client.get_environment(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.GetEnvironmentRequest, dict]): + The request object. Get environment request. + name (str): + Required. The resource name of the environment: + projects/{project_id}/locations/{location_id}/lakes/{lake_id}/environments/{environment_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.types.Environment: + Environment represents a user-visible + compute infrastructure for analytics + within a lake. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.GetEnvironmentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.GetEnvironmentRequest): + request = service.GetEnvironmentRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_environment] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def list_sessions( + self, + request: Union[service.ListSessionsRequest, dict] = None, + *, + parent: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListSessionsPager: + r"""Lists session resources in an environment. + + + .. code-block:: + + from google.cloud import dataplex_v1 + + def sample_list_sessions(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListSessionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_sessions(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.ListSessionsRequest, dict]): + The request object. List sessions request. + parent (str): + Required. The resource name of the parent environment: + projects/{project_number}/locations/{location_id}/lakes/{lake_id}/environment/{environment_id} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.services.dataplex_service.pagers.ListSessionsPager: + List sessions response. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a service.ListSessionsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, service.ListSessionsRequest): + request = service.ListSessionsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
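# Editor's note: an illustrative sync call, building the parent name with the
# environment_path helper added earlier in this file (placeholder IDs):
from google.cloud import dataplex_v1

client = dataplex_v1.DataplexServiceClient()
parent = dataplex_v1.DataplexServiceClient.environment_path(
    "my-project", "us-central1", "my-lake", "my-env"
)
for session in client.list_sessions(parent=parent):
    print(session.name)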
+ if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_sessions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListSessionsPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + def __enter__(self): return self diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/pagers.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/pagers.py index ad07db08bb65..5e7f1f737cdf 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/pagers.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/pagers.py @@ -24,6 +24,7 @@ Iterator, ) +from google.cloud.dataplex_v1.types import analyze from google.cloud.dataplex_v1.types import resources from google.cloud.dataplex_v1.types import service from google.cloud.dataplex_v1.types import tasks @@ -1051,3 +1052,259 @@ async def async_generator(): def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListEnvironmentsPager: + """A pager for iterating through ``list_environments`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListEnvironmentsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``environments`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListEnvironments`` requests and continue to iterate + through the ``environments`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListEnvironmentsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., service.ListEnvironmentsResponse], + request: service.ListEnvironmentsRequest, + response: service.ListEnvironmentsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListEnvironmentsRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListEnvironmentsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = service.ListEnvironmentsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[service.ListEnvironmentsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[analyze.Environment]: + for page in self.pages: + yield from page.environments + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListEnvironmentsAsyncPager: + """A pager for iterating through ``list_environments`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListEnvironmentsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``environments`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListEnvironments`` requests and continue to iterate + through the ``environments`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListEnvironmentsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[service.ListEnvironmentsResponse]], + request: service.ListEnvironmentsRequest, + response: service.ListEnvironmentsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListEnvironmentsRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListEnvironmentsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = service.ListEnvironmentsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[service.ListEnvironmentsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[analyze.Environment]: + async def async_generator(): + async for page in self.pages: + for response in page.environments: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListSessionsPager: + """A pager for iterating through ``list_sessions`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListSessionsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``sessions`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListSessions`` requests and continue to iterate + through the ``sessions`` field on the + corresponding responses. 
+ + All the usual :class:`google.cloud.dataplex_v1.types.ListSessionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., service.ListSessionsResponse], + request: service.ListSessionsRequest, + response: service.ListSessionsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListSessionsRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListSessionsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = service.ListSessionsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[service.ListSessionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[analyze.Session]: + for page in self.pages: + yield from page.sessions + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListSessionsAsyncPager: + """A pager for iterating through ``list_sessions`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListSessionsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``sessions`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListSessions`` requests and continue to iterate + through the ``sessions`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListSessionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[service.ListSessionsResponse]], + request: service.ListSessionsRequest, + response: service.ListSessionsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListSessionsRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListSessionsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = service.ListSessionsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[service.ListSessionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[analyze.Session]: + async def async_generator(): + async for page in self.pages: + for response in page.sessions: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/transports/base.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/transports/base.py index 76f68a0eab64..76f7d0b7b251 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/transports/base.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/transports/base.py @@ -26,6 +26,7 @@ from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore +from google.cloud.dataplex_v1.types import analyze from google.cloud.dataplex_v1.types import resources from google.cloud.dataplex_v1.types import service from google.cloud.dataplex_v1.types import tasks @@ -344,6 +345,24 @@ def _prep_wrapped_messages(self, client_info): self.cancel_job: gapic_v1.method.wrap_method( self.cancel_job, default_timeout=60.0, client_info=client_info, ), + self.create_environment: gapic_v1.method.wrap_method( + self.create_environment, default_timeout=None, client_info=client_info, + ), + self.update_environment: gapic_v1.method.wrap_method( + self.update_environment, default_timeout=None, client_info=client_info, + ), + self.delete_environment: gapic_v1.method.wrap_method( + self.delete_environment, default_timeout=None, client_info=client_info, + ), + self.list_environments: gapic_v1.method.wrap_method( + self.list_environments, default_timeout=None, client_info=client_info, + ), + self.get_environment: gapic_v1.method.wrap_method( + self.get_environment, default_timeout=None, client_info=client_info, + ), + self.list_sessions: gapic_v1.method.wrap_method( + self.list_sessions, default_timeout=None, client_info=client_info, + ), } def close(self): @@ -584,5 +603,62 @@ def cancel_job( ]: raise NotImplementedError() + @property + def create_environment( + self, + ) -> Callable[ + [service.CreateEnvironmentRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def update_environment( + self, + ) -> Callable[ + [service.UpdateEnvironmentRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_environment( + self, + ) -> Callable[ + [service.DeleteEnvironmentRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def list_environments( + self, + ) -> Callable[ + [service.ListEnvironmentsRequest], + Union[ + service.ListEnvironmentsResponse, + Awaitable[service.ListEnvironmentsResponse], + ], + ]: + 
raise NotImplementedError() + + @property + def get_environment( + self, + ) -> Callable[ + [service.GetEnvironmentRequest], + Union[analyze.Environment, Awaitable[analyze.Environment]], + ]: + raise NotImplementedError() + + @property + def list_sessions( + self, + ) -> Callable[ + [service.ListSessionsRequest], + Union[service.ListSessionsResponse, Awaitable[service.ListSessionsResponse]], + ]: + raise NotImplementedError() + __all__ = ("DataplexServiceTransport",) diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/transports/grpc.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/transports/grpc.py index 94ca4d32ecc9..e6855b493f79 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/transports/grpc.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/transports/grpc.py @@ -25,6 +25,7 @@ import grpc # type: ignore +from google.cloud.dataplex_v1.types import analyze from google.cloud.dataplex_v1.types import resources from google.cloud.dataplex_v1.types import service from google.cloud.dataplex_v1.types import tasks @@ -921,6 +922,164 @@ def cancel_job(self) -> Callable[[service.CancelJobRequest], empty_pb2.Empty]: ) return self._stubs["cancel_job"] + @property + def create_environment( + self, + ) -> Callable[[service.CreateEnvironmentRequest], operations_pb2.Operation]: + r"""Return a callable for the create environment method over gRPC. + + Create an environment resource. + + Returns: + Callable[[~.CreateEnvironmentRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_environment" not in self._stubs: + self._stubs["create_environment"] = self.grpc_channel.unary_unary( + "/google.cloud.dataplex.v1.DataplexService/CreateEnvironment", + request_serializer=service.CreateEnvironmentRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_environment"] + + @property + def update_environment( + self, + ) -> Callable[[service.UpdateEnvironmentRequest], operations_pb2.Operation]: + r"""Return a callable for the update environment method over gRPC. + + Update the environment resource. + + Returns: + Callable[[~.UpdateEnvironmentRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_environment" not in self._stubs: + self._stubs["update_environment"] = self.grpc_channel.unary_unary( + "/google.cloud.dataplex.v1.DataplexService/UpdateEnvironment", + request_serializer=service.UpdateEnvironmentRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_environment"] + + @property + def delete_environment( + self, + ) -> Callable[[service.DeleteEnvironmentRequest], operations_pb2.Operation]: + r"""Return a callable for the delete environment method over gRPC. + + Delete the environment resource. All the child + resources must have been deleted before environment + deletion can be initiated. 
+ + Returns: + Callable[[~.DeleteEnvironmentRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_environment" not in self._stubs: + self._stubs["delete_environment"] = self.grpc_channel.unary_unary( + "/google.cloud.dataplex.v1.DataplexService/DeleteEnvironment", + request_serializer=service.DeleteEnvironmentRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_environment"] + + @property + def list_environments( + self, + ) -> Callable[[service.ListEnvironmentsRequest], service.ListEnvironmentsResponse]: + r"""Return a callable for the list environments method over gRPC. + + Lists environments under the given lake. + + Returns: + Callable[[~.ListEnvironmentsRequest], + ~.ListEnvironmentsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_environments" not in self._stubs: + self._stubs["list_environments"] = self.grpc_channel.unary_unary( + "/google.cloud.dataplex.v1.DataplexService/ListEnvironments", + request_serializer=service.ListEnvironmentsRequest.serialize, + response_deserializer=service.ListEnvironmentsResponse.deserialize, + ) + return self._stubs["list_environments"] + + @property + def get_environment( + self, + ) -> Callable[[service.GetEnvironmentRequest], analyze.Environment]: + r"""Return a callable for the get environment method over gRPC. + + Get environment resource. + + Returns: + Callable[[~.GetEnvironmentRequest], + ~.Environment]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_environment" not in self._stubs: + self._stubs["get_environment"] = self.grpc_channel.unary_unary( + "/google.cloud.dataplex.v1.DataplexService/GetEnvironment", + request_serializer=service.GetEnvironmentRequest.serialize, + response_deserializer=analyze.Environment.deserialize, + ) + return self._stubs["get_environment"] + + @property + def list_sessions( + self, + ) -> Callable[[service.ListSessionsRequest], service.ListSessionsResponse]: + r"""Return a callable for the list sessions method over gRPC. + + Lists session resources in an environment. + + Returns: + Callable[[~.ListSessionsRequest], + ~.ListSessionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
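+        #
+        # Illustrative usage sketch, left as a comment so it is not part of
+        # the generated module: callers normally reach this RPC through the
+        # client, whose ``list_sessions`` method wraps the response in a
+        # ``ListSessionsPager`` that requests the next page only when the
+        # iteration needs it. The resource name below is a placeholder, not
+        # a value from this patch:
+        #
+        #     from google.cloud import dataplex_v1
+        #
+        #     client = dataplex_v1.DataplexServiceClient()
+        #     pager = client.list_sessions(
+        #         parent="projects/p/locations/l/lakes/lake/environments/env"
+        #     )
+        #     for session in pager:  # follow-up pages are fetched lazily
+        #         print(session.name)
+        #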
+ if "list_sessions" not in self._stubs: + self._stubs["list_sessions"] = self.grpc_channel.unary_unary( + "/google.cloud.dataplex.v1.DataplexService/ListSessions", + request_serializer=service.ListSessionsRequest.serialize, + response_deserializer=service.ListSessionsResponse.deserialize, + ) + return self._stubs["list_sessions"] + def close(self): self.grpc_channel.close() diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/transports/grpc_asyncio.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/transports/grpc_asyncio.py index 9a910843d7a5..5c3ff5ef0428 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/transports/grpc_asyncio.py @@ -25,6 +25,7 @@ import grpc # type: ignore from grpc.experimental import aio # type: ignore +from google.cloud.dataplex_v1.types import analyze from google.cloud.dataplex_v1.types import resources from google.cloud.dataplex_v1.types import service from google.cloud.dataplex_v1.types import tasks @@ -935,6 +936,174 @@ def cancel_job( ) return self._stubs["cancel_job"] + @property + def create_environment( + self, + ) -> Callable[ + [service.CreateEnvironmentRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the create environment method over gRPC. + + Create an environment resource. + + Returns: + Callable[[~.CreateEnvironmentRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_environment" not in self._stubs: + self._stubs["create_environment"] = self.grpc_channel.unary_unary( + "/google.cloud.dataplex.v1.DataplexService/CreateEnvironment", + request_serializer=service.CreateEnvironmentRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_environment"] + + @property + def update_environment( + self, + ) -> Callable[ + [service.UpdateEnvironmentRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the update environment method over gRPC. + + Update the environment resource. + + Returns: + Callable[[~.UpdateEnvironmentRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_environment" not in self._stubs: + self._stubs["update_environment"] = self.grpc_channel.unary_unary( + "/google.cloud.dataplex.v1.DataplexService/UpdateEnvironment", + request_serializer=service.UpdateEnvironmentRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_environment"] + + @property + def delete_environment( + self, + ) -> Callable[ + [service.DeleteEnvironmentRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the delete environment method over gRPC. + + Delete the environment resource. All the child + resources must have been deleted before environment + deletion can be initiated. 
+ + Returns: + Callable[[~.DeleteEnvironmentRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_environment" not in self._stubs: + self._stubs["delete_environment"] = self.grpc_channel.unary_unary( + "/google.cloud.dataplex.v1.DataplexService/DeleteEnvironment", + request_serializer=service.DeleteEnvironmentRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_environment"] + + @property + def list_environments( + self, + ) -> Callable[ + [service.ListEnvironmentsRequest], Awaitable[service.ListEnvironmentsResponse] + ]: + r"""Return a callable for the list environments method over gRPC. + + Lists environments under the given lake. + + Returns: + Callable[[~.ListEnvironmentsRequest], + Awaitable[~.ListEnvironmentsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_environments" not in self._stubs: + self._stubs["list_environments"] = self.grpc_channel.unary_unary( + "/google.cloud.dataplex.v1.DataplexService/ListEnvironments", + request_serializer=service.ListEnvironmentsRequest.serialize, + response_deserializer=service.ListEnvironmentsResponse.deserialize, + ) + return self._stubs["list_environments"] + + @property + def get_environment( + self, + ) -> Callable[[service.GetEnvironmentRequest], Awaitable[analyze.Environment]]: + r"""Return a callable for the get environment method over gRPC. + + Get environment resource. + + Returns: + Callable[[~.GetEnvironmentRequest], + Awaitable[~.Environment]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_environment" not in self._stubs: + self._stubs["get_environment"] = self.grpc_channel.unary_unary( + "/google.cloud.dataplex.v1.DataplexService/GetEnvironment", + request_serializer=service.GetEnvironmentRequest.serialize, + response_deserializer=analyze.Environment.deserialize, + ) + return self._stubs["get_environment"] + + @property + def list_sessions( + self, + ) -> Callable[ + [service.ListSessionsRequest], Awaitable[service.ListSessionsResponse] + ]: + r"""Return a callable for the list sessions method over gRPC. + + Lists session resources in an environment. + + Returns: + Callable[[~.ListSessionsRequest], + Awaitable[~.ListSessionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
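+        #
+        # Illustrative async usage sketch (comment only; names are
+        # placeholders): the async client returns a ListSessionsAsyncPager,
+        # so iteration uses ``async for`` and the pager awaits each page
+        # fetch internally:
+        #
+        #     from google.cloud import dataplex_v1
+        #
+        #     async def print_sessions(parent: str) -> None:
+        #         client = dataplex_v1.DataplexServiceAsyncClient()
+        #         pager = await client.list_sessions(parent=parent)
+        #         async for session in pager:
+        #             print(session.name)
+        #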
+ if "list_sessions" not in self._stubs: + self._stubs["list_sessions"] = self.grpc_channel.unary_unary( + "/google.cloud.dataplex.v1.DataplexService/ListSessions", + request_serializer=service.ListSessionsRequest.serialize, + response_deserializer=service.ListSessionsResponse.deserialize, + ) + return self._stubs["list_sessions"] + def close(self): return self.grpc_channel.close() diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/metadata_service/async_client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/metadata_service/async_client.py index 3cb31a283101..bafd471b7160 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/metadata_service/async_client.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/metadata_service/async_client.py @@ -209,6 +209,277 @@ def __init__( client_info=client_info, ) + async def create_entity( + self, + request: Union[metadata_.CreateEntityRequest, dict] = None, + *, + parent: str = None, + entity: metadata_.Entity = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metadata_.Entity: + r"""Create a metadata entity. + + .. code-block:: + + from google.cloud import dataplex_v1 + + def sample_create_entity(): + # Create a client + client = dataplex_v1.MetadataServiceClient() + + # Initialize request argument(s) + entity = dataplex_v1.Entity() + entity.id = "id_value" + entity.type_ = "FILESET" + entity.asset = "asset_value" + entity.data_path = "data_path_value" + entity.system = "BIGQUERY" + entity.format_.mime_type = "mime_type_value" + entity.schema.user_managed = True + + request = dataplex_v1.CreateEntityRequest( + parent="parent_value", + entity=entity, + ) + + # Make the request + response = client.create_entity(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.CreateEntityRequest, dict]): + The request object. Create a metadata entity request. + parent (:class:`str`): + Required. The resource name of the parent zone: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + entity (:class:`google.cloud.dataplex_v1.types.Entity`): + Required. Entity resource. + This corresponds to the ``entity`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.types.Entity: + Represents tables and fileset + metadata contained within a zone. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, entity]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = metadata_.CreateEntityRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+        if parent is not None:
+            request.parent = parent
+        if entity is not None:
+            request.entity = entity
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.create_entity,
+            default_timeout=60.0,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+        )
+
+        # Send the request.
+        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+        # Done; return the response.
+        return response
+
+    async def update_entity(
+        self,
+        request: Union[metadata_.UpdateEntityRequest, dict] = None,
+        *,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: float = None,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> metadata_.Entity:
+        r"""Update a metadata entity. Only supports full resource
+        update.
+
+
+        .. code-block::
+
+            from google.cloud import dataplex_v1
+
+            def sample_update_entity():
+                # Create a client
+                client = dataplex_v1.MetadataServiceClient()
+
+                # Initialize request argument(s)
+                entity = dataplex_v1.Entity()
+                entity.id = "id_value"
+                entity.type_ = "FILESET"
+                entity.asset = "asset_value"
+                entity.data_path = "data_path_value"
+                entity.system = "BIGQUERY"
+                entity.format_.mime_type = "mime_type_value"
+                entity.schema.user_managed = True
+
+                request = dataplex_v1.UpdateEntityRequest(
+                    entity=entity,
+                )
+
+                # Make the request
+                response = client.update_entity(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.dataplex_v1.types.UpdateEntityRequest, dict]):
+                The request object. Update a metadata entity request.
+                The existing entity will be fully replaced by the entity
+                in the request. The entity ID is mutable. To modify the
+                ID, use the current entity ID in the request URL and
+                specify the new ID in the request body.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.dataplex_v1.types.Entity:
+                Represents tables and fileset
+                metadata contained within a zone.
+
+        """
+        # Create or coerce a protobuf request object.
+        request = metadata_.UpdateEntityRequest(request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.update_entity,
+            default_timeout=60.0,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata(
+                (("entity.name", request.entity.name),)
+            ),
+        )
+
+        # Send the request.
+        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+        # Done; return the response.
+        return response
+
+    async def delete_entity(
+        self,
+        request: Union[metadata_.DeleteEntityRequest, dict] = None,
+        *,
+        name: str = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: float = None,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> None:
+        r"""Delete a metadata entity.
+
+        ..
code-block:: + + from google.cloud import dataplex_v1 + + def sample_delete_entity(): + # Create a client + client = dataplex_v1.MetadataServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteEntityRequest( + name="name_value", + etag="etag_value", + ) + + # Make the request + client.delete_entity(request=request) + + Args: + request (Union[google.cloud.dataplex_v1.types.DeleteEntityRequest, dict]): + The request object. Delete a metadata entity request. + name (:class:`str`): + Required. The resource name of the entity: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{entity_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = metadata_.DeleteEntityRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_entity, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + async def get_entity( self, request: Union[metadata_.GetEntityRequest, dict] = None, @@ -415,6 +686,190 @@ def sample_list_entities(): # Done; return the response. return response + async def create_partition( + self, + request: Union[metadata_.CreatePartitionRequest, dict] = None, + *, + parent: str = None, + partition: metadata_.Partition = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metadata_.Partition: + r"""Create a metadata partition. + + .. code-block:: + + from google.cloud import dataplex_v1 + + def sample_create_partition(): + # Create a client + client = dataplex_v1.MetadataServiceClient() + + # Initialize request argument(s) + partition = dataplex_v1.Partition() + partition.values = ['values_value_1', 'values_value_2'] + partition.location = "location_value" + + request = dataplex_v1.CreatePartitionRequest( + parent="parent_value", + partition=partition, + ) + + # Make the request + response = client.create_partition(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.CreatePartitionRequest, dict]): + The request object. Create metadata partition request. + parent (:class:`str`): + Required. 
The resource name of the parent zone: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{entity_id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + partition (:class:`google.cloud.dataplex_v1.types.Partition`): + Required. Partition resource. + This corresponds to the ``partition`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.types.Partition: + Represents partition metadata + contained within entity instances. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, partition]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = metadata_.CreatePartitionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if partition is not None: + request.partition = partition + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_partition, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def delete_partition( + self, + request: Union[metadata_.DeletePartitionRequest, dict] = None, + *, + name: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Delete a metadata partition. + + .. code-block:: + + from google.cloud import dataplex_v1 + + def sample_delete_partition(): + # Create a client + client = dataplex_v1.MetadataServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.DeletePartitionRequest( + name="name_value", + ) + + # Make the request + client.delete_partition(request=request) + + Args: + request (Union[google.cloud.dataplex_v1.types.DeletePartitionRequest, dict]): + The request object. Delete metadata partition request. + name (:class:`str`): + Required. The resource name of the partition. format: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{entity_id}/partitions/{partition_value_path}``. + The {partition_value_path} segment consists of an + ordered sequence of partition values separated by "/". + All values must be provided. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = metadata_.DeletePartitionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_partition, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + async def get_partition( self, request: Union[metadata_.GetPartitionRequest, dict] = None, @@ -450,7 +905,10 @@ def sample_get_partition(): The request object. Get metadata partition request. name (:class:`str`): Required. The resource name of the partition: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{entity_id}/partitions/{partition_id}``. + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{entity_id}/partitions/{partition_value_path}``. + The {partition_value_path} segment consists of an + ordered sequence of partition values separated by "/". + All values must be provided. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/metadata_service/client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/metadata_service/client.py index 1f109ad8514a..aecc64536980 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/metadata_service/client.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/metadata_service/client.py @@ -443,6 +443,281 @@ def __init__( always_use_jwt_access=True, ) + def create_entity( + self, + request: Union[metadata_.CreateEntityRequest, dict] = None, + *, + parent: str = None, + entity: metadata_.Entity = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metadata_.Entity: + r"""Create a metadata entity. + + + .. 
code-block:: + + from google.cloud import dataplex_v1 + + def sample_create_entity(): + # Create a client + client = dataplex_v1.MetadataServiceClient() + + # Initialize request argument(s) + entity = dataplex_v1.Entity() + entity.id = "id_value" + entity.type_ = "FILESET" + entity.asset = "asset_value" + entity.data_path = "data_path_value" + entity.system = "BIGQUERY" + entity.format_.mime_type = "mime_type_value" + entity.schema.user_managed = True + + request = dataplex_v1.CreateEntityRequest( + parent="parent_value", + entity=entity, + ) + + # Make the request + response = client.create_entity(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.CreateEntityRequest, dict]): + The request object. Create a metadata entity request. + parent (str): + Required. The resource name of the parent zone: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + entity (google.cloud.dataplex_v1.types.Entity): + Required. Entity resource. + This corresponds to the ``entity`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.types.Entity: + Represents tables and fileset + metadata contained within a zone. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, entity]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a metadata_.CreateEntityRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, metadata_.CreateEntityRequest): + request = metadata_.CreateEntityRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if entity is not None: + request.entity = entity + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_entity] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def update_entity( + self, + request: Union[metadata_.UpdateEntityRequest, dict] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metadata_.Entity: + r"""Update a metadata entity. Only supports full resource + update. + + + + .. 
code-block::
+
+            from google.cloud import dataplex_v1
+
+            def sample_update_entity():
+                # Create a client
+                client = dataplex_v1.MetadataServiceClient()
+
+                # Initialize request argument(s)
+                entity = dataplex_v1.Entity()
+                entity.id = "id_value"
+                entity.type_ = "FILESET"
+                entity.asset = "asset_value"
+                entity.data_path = "data_path_value"
+                entity.system = "BIGQUERY"
+                entity.format_.mime_type = "mime_type_value"
+                entity.schema.user_managed = True
+
+                request = dataplex_v1.UpdateEntityRequest(
+                    entity=entity,
+                )
+
+                # Make the request
+                response = client.update_entity(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.dataplex_v1.types.UpdateEntityRequest, dict]):
+                The request object. Update a metadata entity request.
+                The existing entity will be fully replaced by the entity
+                in the request. The entity ID is mutable. To modify the
+                ID, use the current entity ID in the request URL and
+                specify the new ID in the request body.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.dataplex_v1.types.Entity:
+                Represents tables and fileset
+                metadata contained within a zone.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Minor optimization to avoid making a copy if the user passes
+        # in a metadata_.UpdateEntityRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, metadata_.UpdateEntityRequest):
+            request = metadata_.UpdateEntityRequest(request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.update_entity]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata(
+                (("entity.name", request.entity.name),)
+            ),
+        )
+
+        # Send the request.
+        response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+        # Done; return the response.
+        return response
+
+    def delete_entity(
+        self,
+        request: Union[metadata_.DeleteEntityRequest, dict] = None,
+        *,
+        name: str = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: float = None,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> None:
+        r"""Delete a metadata entity.
+
+
+        .. code-block::
+
+            from google.cloud import dataplex_v1
+
+            def sample_delete_entity():
+                # Create a client
+                client = dataplex_v1.MetadataServiceClient()
+
+                # Initialize request argument(s)
+                request = dataplex_v1.DeleteEntityRequest(
+                    name="name_value",
+                    etag="etag_value",
+                )
+
+                # Make the request
+                client.delete_entity(request=request)
+
+        Args:
+            request (Union[google.cloud.dataplex_v1.types.DeleteEntityRequest, dict]):
+                The request object. Delete a metadata entity request.
+            name (str):
+                Required. The resource name of the entity:
+                ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{entity_id}``.
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a metadata_.DeleteEntityRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, metadata_.DeleteEntityRequest): + request = metadata_.DeleteEntityRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_entity] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + def get_entity( self, request: Union[metadata_.GetEntityRequest, dict] = None, @@ -633,6 +908,192 @@ def sample_list_entities(): # Done; return the response. return response + def create_partition( + self, + request: Union[metadata_.CreatePartitionRequest, dict] = None, + *, + parent: str = None, + partition: metadata_.Partition = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> metadata_.Partition: + r"""Create a metadata partition. + + + .. code-block:: + + from google.cloud import dataplex_v1 + + def sample_create_partition(): + # Create a client + client = dataplex_v1.MetadataServiceClient() + + # Initialize request argument(s) + partition = dataplex_v1.Partition() + partition.values = ['values_value_1', 'values_value_2'] + partition.location = "location_value" + + request = dataplex_v1.CreatePartitionRequest( + parent="parent_value", + partition=partition, + ) + + # Make the request + response = client.create_partition(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.CreatePartitionRequest, dict]): + The request object. Create metadata partition request. + parent (str): + Required. The resource name of the parent zone: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{entity_id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + partition (google.cloud.dataplex_v1.types.Partition): + Required. Partition resource. + This corresponds to the ``partition`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dataplex_v1.types.Partition: + Represents partition metadata + contained within entity instances. 
+ + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, partition]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a metadata_.CreatePartitionRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, metadata_.CreatePartitionRequest): + request = metadata_.CreatePartitionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if partition is not None: + request.partition = partition + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_partition] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def delete_partition( + self, + request: Union[metadata_.DeletePartitionRequest, dict] = None, + *, + name: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Delete a metadata partition. + + + .. code-block:: + + from google.cloud import dataplex_v1 + + def sample_delete_partition(): + # Create a client + client = dataplex_v1.MetadataServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.DeletePartitionRequest( + name="name_value", + ) + + # Make the request + client.delete_partition(request=request) + + Args: + request (Union[google.cloud.dataplex_v1.types.DeletePartitionRequest, dict]): + The request object. Delete metadata partition request. + name (str): + Required. The resource name of the partition. format: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{entity_id}/partitions/{partition_value_path}``. + The {partition_value_path} segment consists of an + ordered sequence of partition values separated by "/". + All values must be provided. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a metadata_.DeletePartitionRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, metadata_.DeletePartitionRequest): + request = metadata_.DeletePartitionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_partition] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + def get_partition( self, request: Union[metadata_.GetPartitionRequest, dict] = None, @@ -669,7 +1130,10 @@ def sample_get_partition(): The request object. Get metadata partition request. name (str): Required. The resource name of the partition: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{entity_id}/partitions/{partition_id}``. + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{entity_id}/partitions/{partition_value_path}``. + The {partition_value_path} segment consists of an + ordered sequence of partition values separated by "/". + All values must be provided. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/metadata_service/transports/base.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/metadata_service/transports/base.py index 8a5e7cae130a..9c39d23abfe3 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/metadata_service/transports/base.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/metadata_service/transports/base.py @@ -26,6 +26,7 @@ from google.oauth2 import service_account # type: ignore from google.cloud.dataplex_v1.types import metadata_ +from google.protobuf import empty_pb2 # type: ignore try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( @@ -118,6 +119,15 @@ def __init__( def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
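        # Conceptually (a rough sketch of our reading of api_core, not its
        # actual implementation), ``gapic_v1.method.wrap_method`` closes over
        # the per-RPC defaults registered below and applies them whenever the
        # caller does not override them:
        #
        #     def wrap_method(func, default_timeout=None, **_):
        #         def wrapped(request, *, timeout=None, **kwargs):
        #             # fall back to the precomputed default timeout
        #             return func(
        #                 request, timeout=timeout or default_timeout, **kwargs
        #             )
        #         return wrapped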
self._wrapped_methods = { + self.create_entity: gapic_v1.method.wrap_method( + self.create_entity, default_timeout=60.0, client_info=client_info, + ), + self.update_entity: gapic_v1.method.wrap_method( + self.update_entity, default_timeout=60.0, client_info=client_info, + ), + self.delete_entity: gapic_v1.method.wrap_method( + self.delete_entity, default_timeout=60.0, client_info=client_info, + ), self.get_entity: gapic_v1.method.wrap_method( self.get_entity, default_retry=retries.Retry( @@ -146,6 +156,12 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.create_partition: gapic_v1.method.wrap_method( + self.create_partition, default_timeout=60.0, client_info=client_info, + ), + self.delete_partition: gapic_v1.method.wrap_method( + self.delete_partition, default_timeout=60.0, client_info=client_info, + ), self.get_partition: gapic_v1.method.wrap_method( self.get_partition, default_retry=retries.Retry( @@ -185,6 +201,33 @@ def close(self): """ raise NotImplementedError() + @property + def create_entity( + self, + ) -> Callable[ + [metadata_.CreateEntityRequest], + Union[metadata_.Entity, Awaitable[metadata_.Entity]], + ]: + raise NotImplementedError() + + @property + def update_entity( + self, + ) -> Callable[ + [metadata_.UpdateEntityRequest], + Union[metadata_.Entity, Awaitable[metadata_.Entity]], + ]: + raise NotImplementedError() + + @property + def delete_entity( + self, + ) -> Callable[ + [metadata_.DeleteEntityRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + @property def get_entity( self, @@ -205,6 +248,24 @@ def list_entities( ]: raise NotImplementedError() + @property + def create_partition( + self, + ) -> Callable[ + [metadata_.CreatePartitionRequest], + Union[metadata_.Partition, Awaitable[metadata_.Partition]], + ]: + raise NotImplementedError() + + @property + def delete_partition( + self, + ) -> Callable[ + [metadata_.DeletePartitionRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + @property def get_partition( self, diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/metadata_service/transports/grpc.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/metadata_service/transports/grpc.py index 2c0e85fa294b..b9dac8a1ea57 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/metadata_service/transports/grpc.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/metadata_service/transports/grpc.py @@ -25,6 +25,7 @@ import grpc # type: ignore from google.cloud.dataplex_v1.types import metadata_ +from google.protobuf import empty_pb2 # type: ignore from .base import MetadataServiceTransport, DEFAULT_CLIENT_INFO @@ -229,6 +230,85 @@ def grpc_channel(self) -> grpc.Channel: """ return self._grpc_channel + @property + def create_entity( + self, + ) -> Callable[[metadata_.CreateEntityRequest], metadata_.Entity]: + r"""Return a callable for the create entity method over gRPC. + + Create a metadata entity. + + Returns: + Callable[[~.CreateEntityRequest], + ~.Entity]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
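+        #
+        # Hypothetical direct-transport sketch (comment only; most callers
+        # should go through MetadataServiceClient instead). Generated clients
+        # expose the transport via ``client.transport``, so the cached
+        # callable can be invoked with a typed request; names below are
+        # placeholders:
+        #
+        #     request = metadata_.CreateEntityRequest(
+        #         parent="projects/p/locations/l/lakes/lake/zones/zone",
+        #         entity=metadata_.Entity(id="orders"),
+        #     )
+        #     entity = client.transport.create_entity(request)
+        #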
+ if "create_entity" not in self._stubs: + self._stubs["create_entity"] = self.grpc_channel.unary_unary( + "/google.cloud.dataplex.v1.MetadataService/CreateEntity", + request_serializer=metadata_.CreateEntityRequest.serialize, + response_deserializer=metadata_.Entity.deserialize, + ) + return self._stubs["create_entity"] + + @property + def update_entity( + self, + ) -> Callable[[metadata_.UpdateEntityRequest], metadata_.Entity]: + r"""Return a callable for the update entity method over gRPC. + + Update a metadata entity. Only supports full resource + update. + + Returns: + Callable[[~.UpdateEntityRequest], + ~.Entity]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_entity" not in self._stubs: + self._stubs["update_entity"] = self.grpc_channel.unary_unary( + "/google.cloud.dataplex.v1.MetadataService/UpdateEntity", + request_serializer=metadata_.UpdateEntityRequest.serialize, + response_deserializer=metadata_.Entity.deserialize, + ) + return self._stubs["update_entity"] + + @property + def delete_entity( + self, + ) -> Callable[[metadata_.DeleteEntityRequest], empty_pb2.Empty]: + r"""Return a callable for the delete entity method over gRPC. + + Delete a metadata entity. + + Returns: + Callable[[~.DeleteEntityRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_entity" not in self._stubs: + self._stubs["delete_entity"] = self.grpc_channel.unary_unary( + "/google.cloud.dataplex.v1.MetadataService/DeleteEntity", + request_serializer=metadata_.DeleteEntityRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_entity"] + @property def get_entity(self) -> Callable[[metadata_.GetEntityRequest], metadata_.Entity]: r"""Return a callable for the get entity method over gRPC. @@ -279,6 +359,58 @@ def list_entities( ) return self._stubs["list_entities"] + @property + def create_partition( + self, + ) -> Callable[[metadata_.CreatePartitionRequest], metadata_.Partition]: + r"""Return a callable for the create partition method over gRPC. + + Create a metadata partition. + + Returns: + Callable[[~.CreatePartitionRequest], + ~.Partition]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_partition" not in self._stubs: + self._stubs["create_partition"] = self.grpc_channel.unary_unary( + "/google.cloud.dataplex.v1.MetadataService/CreatePartition", + request_serializer=metadata_.CreatePartitionRequest.serialize, + response_deserializer=metadata_.Partition.deserialize, + ) + return self._stubs["create_partition"] + + @property + def delete_partition( + self, + ) -> Callable[[metadata_.DeletePartitionRequest], empty_pb2.Empty]: + r"""Return a callable for the delete partition method over gRPC. + + Delete a metadata partition. 
+ + Returns: + Callable[[~.DeletePartitionRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_partition" not in self._stubs: + self._stubs["delete_partition"] = self.grpc_channel.unary_unary( + "/google.cloud.dataplex.v1.MetadataService/DeletePartition", + request_serializer=metadata_.DeletePartitionRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_partition"] + @property def get_partition( self, diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/metadata_service/transports/grpc_asyncio.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/metadata_service/transports/grpc_asyncio.py index 4f54c59982ae..74d970807a54 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/metadata_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/metadata_service/transports/grpc_asyncio.py @@ -25,6 +25,7 @@ from grpc.experimental import aio # type: ignore from google.cloud.dataplex_v1.types import metadata_ +from google.protobuf import empty_pb2 # type: ignore from .base import MetadataServiceTransport, DEFAULT_CLIENT_INFO from .grpc import MetadataServiceGrpcTransport @@ -231,6 +232,85 @@ def grpc_channel(self) -> aio.Channel: # Return the channel from cache. return self._grpc_channel + @property + def create_entity( + self, + ) -> Callable[[metadata_.CreateEntityRequest], Awaitable[metadata_.Entity]]: + r"""Return a callable for the create entity method over gRPC. + + Create a metadata entity. + + Returns: + Callable[[~.CreateEntityRequest], + Awaitable[~.Entity]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_entity" not in self._stubs: + self._stubs["create_entity"] = self.grpc_channel.unary_unary( + "/google.cloud.dataplex.v1.MetadataService/CreateEntity", + request_serializer=metadata_.CreateEntityRequest.serialize, + response_deserializer=metadata_.Entity.deserialize, + ) + return self._stubs["create_entity"] + + @property + def update_entity( + self, + ) -> Callable[[metadata_.UpdateEntityRequest], Awaitable[metadata_.Entity]]: + r"""Return a callable for the update entity method over gRPC. + + Update a metadata entity. Only supports full resource + update. + + Returns: + Callable[[~.UpdateEntityRequest], + Awaitable[~.Entity]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
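+        #
+        # Because UpdateEntity is a full replacement (the request carries no
+        # update mask), a typical flow reads the entity first and writes the
+        # whole object back. A hedged sketch with placeholder names, assuming
+        # the FULL view defined on GetEntityRequest:
+        #
+        #     request = dataplex_v1.GetEntityRequest(
+        #         name=entity_name,
+        #         view=dataplex_v1.GetEntityRequest.EntityView.FULL,
+        #     )
+        #     entity = await client.get_entity(request=request)
+        #     entity.description = "refreshed description"
+        #     await client.update_entity(
+        #         request=dataplex_v1.UpdateEntityRequest(entity=entity)
+        #     )
+        #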
+ if "update_entity" not in self._stubs: + self._stubs["update_entity"] = self.grpc_channel.unary_unary( + "/google.cloud.dataplex.v1.MetadataService/UpdateEntity", + request_serializer=metadata_.UpdateEntityRequest.serialize, + response_deserializer=metadata_.Entity.deserialize, + ) + return self._stubs["update_entity"] + + @property + def delete_entity( + self, + ) -> Callable[[metadata_.DeleteEntityRequest], Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete entity method over gRPC. + + Delete a metadata entity. + + Returns: + Callable[[~.DeleteEntityRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_entity" not in self._stubs: + self._stubs["delete_entity"] = self.grpc_channel.unary_unary( + "/google.cloud.dataplex.v1.MetadataService/DeleteEntity", + request_serializer=metadata_.DeleteEntityRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_entity"] + @property def get_entity( self, @@ -285,6 +365,58 @@ def list_entities( ) return self._stubs["list_entities"] + @property + def create_partition( + self, + ) -> Callable[[metadata_.CreatePartitionRequest], Awaitable[metadata_.Partition]]: + r"""Return a callable for the create partition method over gRPC. + + Create a metadata partition. + + Returns: + Callable[[~.CreatePartitionRequest], + Awaitable[~.Partition]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_partition" not in self._stubs: + self._stubs["create_partition"] = self.grpc_channel.unary_unary( + "/google.cloud.dataplex.v1.MetadataService/CreatePartition", + request_serializer=metadata_.CreatePartitionRequest.serialize, + response_deserializer=metadata_.Partition.deserialize, + ) + return self._stubs["create_partition"] + + @property + def delete_partition( + self, + ) -> Callable[[metadata_.DeletePartitionRequest], Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete partition method over gRPC. + + Delete a metadata partition. + + Returns: + Callable[[~.DeletePartitionRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_partition" not in self._stubs: + self._stubs["delete_partition"] = self.grpc_channel.unary_unary( + "/google.cloud.dataplex.v1.MetadataService/DeletePartition", + request_serializer=metadata_.DeletePartitionRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_partition"] + @property def get_partition( self, diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/__init__.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/__init__.py index 4b8bb4f03466..3acaa7e85bc3 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/__init__.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/__init__.py @@ -13,12 +13,29 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from .analyze import ( + Content, + Environment, + Session, +) +from .content import ( + CreateContentRequest, + DeleteContentRequest, + GetContentRequest, + ListContentRequest, + ListContentResponse, + UpdateContentRequest, +) from .logs import ( DiscoveryEvent, JobEvent, SessionEvent, ) from .metadata_ import ( + CreateEntityRequest, + CreatePartitionRequest, + DeleteEntityRequest, + DeletePartitionRequest, Entity, GetEntityRequest, GetPartitionRequest, @@ -29,6 +46,7 @@ Partition, Schema, StorageFormat, + UpdateEntityRequest, StorageSystem, ) from .resources import ( @@ -42,14 +60,17 @@ from .service import ( CancelJobRequest, CreateAssetRequest, + CreateEnvironmentRequest, CreateLakeRequest, CreateTaskRequest, CreateZoneRequest, DeleteAssetRequest, + DeleteEnvironmentRequest, DeleteLakeRequest, DeleteTaskRequest, DeleteZoneRequest, GetAssetRequest, + GetEnvironmentRequest, GetJobRequest, GetLakeRequest, GetTaskRequest, @@ -58,11 +79,15 @@ ListAssetActionsRequest, ListAssetsRequest, ListAssetsResponse, + ListEnvironmentsRequest, + ListEnvironmentsResponse, ListJobsRequest, ListJobsResponse, ListLakeActionsRequest, ListLakesRequest, ListLakesResponse, + ListSessionsRequest, + ListSessionsResponse, ListTasksRequest, ListTasksResponse, ListZoneActionsRequest, @@ -70,6 +95,7 @@ ListZonesResponse, OperationMetadata, UpdateAssetRequest, + UpdateEnvironmentRequest, UpdateLakeRequest, UpdateTaskRequest, UpdateZoneRequest, @@ -80,9 +106,22 @@ ) __all__ = ( + "Content", + "Environment", + "Session", + "CreateContentRequest", + "DeleteContentRequest", + "GetContentRequest", + "ListContentRequest", + "ListContentResponse", + "UpdateContentRequest", "DiscoveryEvent", "JobEvent", "SessionEvent", + "CreateEntityRequest", + "CreatePartitionRequest", + "DeleteEntityRequest", + "DeletePartitionRequest", "Entity", "GetEntityRequest", "GetPartitionRequest", @@ -93,6 +132,7 @@ "Partition", "Schema", "StorageFormat", + "UpdateEntityRequest", "StorageSystem", "Action", "Asset", @@ -102,14 +142,17 @@ "State", "CancelJobRequest", "CreateAssetRequest", + "CreateEnvironmentRequest", "CreateLakeRequest", "CreateTaskRequest", "CreateZoneRequest", "DeleteAssetRequest", + "DeleteEnvironmentRequest", "DeleteLakeRequest", "DeleteTaskRequest", "DeleteZoneRequest", "GetAssetRequest", + "GetEnvironmentRequest", "GetJobRequest", "GetLakeRequest", "GetTaskRequest", @@ -118,11 +161,15 @@ "ListAssetActionsRequest", "ListAssetsRequest", "ListAssetsResponse", + "ListEnvironmentsRequest", + "ListEnvironmentsResponse", "ListJobsRequest", "ListJobsResponse", "ListLakeActionsRequest", "ListLakesRequest", "ListLakesResponse", + "ListSessionsRequest", + "ListSessionsResponse", 
"ListTasksRequest", "ListTasksResponse", "ListZoneActionsRequest", @@ -130,6 +177,7 @@ "ListZonesResponse", "OperationMetadata", "UpdateAssetRequest", + "UpdateEnvironmentRequest", "UpdateLakeRequest", "UpdateTaskRequest", "UpdateZoneRequest", diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/analyze.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/analyze.py new file mode 100644 index 000000000000..e07381dde455 --- /dev/null +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/analyze.py @@ -0,0 +1,340 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import proto # type: ignore + +from google.cloud.dataplex_v1.types import resources +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package="google.cloud.dataplex.v1", manifest={"Environment", "Content", "Session",}, +) + + +class Environment(proto.Message): + r"""Environment represents a user-visible compute infrastructure + for analytics within a lake. + + Attributes: + name (str): + Output only. The relative resource name of the environment, + of the form: + projects/{project_id}/locations/{location_id}/lakes/{lake_id}/environment/{environment_id} + display_name (str): + Optional. User friendly display name. + uid (str): + Output only. System generated globally unique + ID for the environment. This ID will be + different if the environment is deleted and + re-created with the same name. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Environment creation time. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the environment + was last updated. + labels (Sequence[google.cloud.dataplex_v1.types.Environment.LabelsEntry]): + Optional. User defined labels for the + environment. + description (str): + Optional. Description of the environment. + state (google.cloud.dataplex_v1.types.State): + Output only. Current state of the + environment. + infrastructure_spec (google.cloud.dataplex_v1.types.Environment.InfrastructureSpec): + Required. Infrastructure specification for + the Environment. + session_spec (google.cloud.dataplex_v1.types.Environment.SessionSpec): + Optional. Configuration for sessions created + for this environment. + session_status (google.cloud.dataplex_v1.types.Environment.SessionStatus): + Output only. Status of sessions created for + this environment. + endpoints (google.cloud.dataplex_v1.types.Environment.Endpoints): + Output only. URI Endpoints to access sessions + associated with the Environment. + """ + + class InfrastructureSpec(proto.Message): + r"""Configuration for the underlying infrastructure used to run + workloads. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + compute (google.cloud.dataplex_v1.types.Environment.InfrastructureSpec.ComputeResources): + Optional. 
Compute resources needed for + analyze interactive workloads. + + This field is a member of `oneof`_ ``resources``. + os_image (google.cloud.dataplex_v1.types.Environment.InfrastructureSpec.OsImageRuntime): + Required. Software Runtime Configuration for + analyze interactive workloads. + + This field is a member of `oneof`_ ``runtime``. + """ + + class ComputeResources(proto.Message): + r"""Compute resources associated with the analyze interactive + workloads. + + Attributes: + disk_size_gb (int): + Optional. Size in GB of the disk. Default is + 100 GB. + node_count (int): + Optional. Total number of nodes in the + sessions created for this environment. + max_node_count (int): + Optional. Max configurable nodes. If max_node_count > + node_count, then auto-scaling is enabled. + """ + + disk_size_gb = proto.Field(proto.INT32, number=1,) + node_count = proto.Field(proto.INT32, number=2,) + max_node_count = proto.Field(proto.INT32, number=3,) + + class OsImageRuntime(proto.Message): + r"""Software Runtime Configuration to run Analyze. + + Attributes: + image_version (str): + Required. Dataplex Image version. + java_libraries (Sequence[str]): + Optional. List of Java jars to be included in + the runtime environment. Valid input includes + Cloud Storage URIs to Jar binaries. For example, + gs://bucket-name/my/path/to/file.jar + python_packages (Sequence[str]): + Optional. A list of python packages to be + installed. Valid formats include Cloud Storage + URI to a PIP installable library. For example, + gs://bucket-name/my/path/to/lib.tar.gz + properties (Sequence[google.cloud.dataplex_v1.types.Environment.InfrastructureSpec.OsImageRuntime.PropertiesEntry]): + Optional. Spark properties to provide configuration for use + in sessions created for this environment. The properties to + set on daemon config files. Property keys are specified in + ``prefix:property`` format. The prefix must be "spark". + """ + + image_version = proto.Field(proto.STRING, number=1,) + java_libraries = proto.RepeatedField(proto.STRING, number=2,) + python_packages = proto.RepeatedField(proto.STRING, number=3,) + properties = proto.MapField(proto.STRING, proto.STRING, number=4,) + + compute = proto.Field( + proto.MESSAGE, + number=50, + oneof="resources", + message="Environment.InfrastructureSpec.ComputeResources", + ) + os_image = proto.Field( + proto.MESSAGE, + number=100, + oneof="runtime", + message="Environment.InfrastructureSpec.OsImageRuntime", + ) + + class SessionSpec(proto.Message): + r""" + + Attributes: + max_idle_duration (google.protobuf.duration_pb2.Duration): + Optional. The idle time configuration of the + session. The session will be auto-terminated at + the end of this period. + enable_fast_startup (bool): + Optional. If True, this causes sessions to be + pre-created and available for faster startup to + enable interactive exploration use-cases. This + defaults to False to avoid additional billed + charges. + These can only be set to True for the + environment with name set to "default", and with + default configuration. + """ + + max_idle_duration = proto.Field( + proto.MESSAGE, number=1, message=duration_pb2.Duration, + ) + enable_fast_startup = proto.Field(proto.BOOL, number=2,) + + class SessionStatus(proto.Message): + r""" + + Attributes: + active (bool): + Output only. Queries over sessions to mark + whether the environment is currently active or + not + """ + + active = proto.Field(proto.BOOL, number=1,) + + class Endpoints(proto.Message): + r""" + + Attributes: + notebooks (str): + Output only. 
URI to serve notebook APIs + sql (str): + Output only. URI to serve SQL APIs + """ + + notebooks = proto.Field(proto.STRING, number=1,) + sql = proto.Field(proto.STRING, number=2,) + + name = proto.Field(proto.STRING, number=1,) + display_name = proto.Field(proto.STRING, number=2,) + uid = proto.Field(proto.STRING, number=3,) + create_time = proto.Field(proto.MESSAGE, number=4, message=timestamp_pb2.Timestamp,) + update_time = proto.Field(proto.MESSAGE, number=5, message=timestamp_pb2.Timestamp,) + labels = proto.MapField(proto.STRING, proto.STRING, number=6,) + description = proto.Field(proto.STRING, number=7,) + state = proto.Field(proto.ENUM, number=8, enum=resources.State,) + infrastructure_spec = proto.Field( + proto.MESSAGE, number=100, message=InfrastructureSpec, + ) + session_spec = proto.Field(proto.MESSAGE, number=101, message=SessionSpec,) + session_status = proto.Field(proto.MESSAGE, number=102, message=SessionStatus,) + endpoints = proto.Field(proto.MESSAGE, number=200, message=Endpoints,) + + +class Content(proto.Message): + r"""Content represents a user-visible notebook or a sql script + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Output only. The relative resource name of the content, of + the form: + projects/{project_id}/locations/{location_id}/lakes/{lake_id}/content/{content_id} + uid (str): + Output only. System generated globally unique + ID for the content. This ID will be different if + the content is deleted and re-created with the + same name. + path (str): + Required. The path for the Content file, + represented as directory structure. Unique + within a lake. Limited to alphanumerics, + hyphens, underscores, dots and slashes. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Content creation time. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the content was + last updated. + labels (Sequence[google.cloud.dataplex_v1.types.Content.LabelsEntry]): + Optional. User defined labels for the + content. + description (str): + Optional. Description of the content. + data_text (str): + Required. Content data in string format. + + This field is a member of `oneof`_ ``data``. + sql_script (google.cloud.dataplex_v1.types.Content.SqlScript): + Sql Script related configurations. + + This field is a member of `oneof`_ ``content``. + notebook (google.cloud.dataplex_v1.types.Content.Notebook): + Notebook related configurations. + + This field is a member of `oneof`_ ``content``. + """ + + class SqlScript(proto.Message): + r"""Configuration for the Sql Script content. + + Attributes: + engine (google.cloud.dataplex_v1.types.Content.SqlScript.QueryEngine): + Required. Query Engine to be used for the Sql + Query. + """ + + class QueryEngine(proto.Enum): + r"""Query Engine Type of the SQL Script.""" + QUERY_ENGINE_UNSPECIFIED = 0 + SPARK = 2 + + engine = proto.Field( + proto.ENUM, number=1, enum="Content.SqlScript.QueryEngine", + ) + + class Notebook(proto.Message): + r"""Configuration for Notebook content. + + Attributes: + kernel_type (google.cloud.dataplex_v1.types.Content.Notebook.KernelType): + Required. Kernel Type of the notebook. 
+ """ + + class KernelType(proto.Enum): + r"""Kernel Type of the Jupyter notebook.""" + KERNEL_TYPE_UNSPECIFIED = 0 + PYTHON3 = 1 + + kernel_type = proto.Field( + proto.ENUM, number=1, enum="Content.Notebook.KernelType", + ) + + name = proto.Field(proto.STRING, number=1,) + uid = proto.Field(proto.STRING, number=2,) + path = proto.Field(proto.STRING, number=3,) + create_time = proto.Field(proto.MESSAGE, number=4, message=timestamp_pb2.Timestamp,) + update_time = proto.Field(proto.MESSAGE, number=5, message=timestamp_pb2.Timestamp,) + labels = proto.MapField(proto.STRING, proto.STRING, number=6,) + description = proto.Field(proto.STRING, number=7,) + data_text = proto.Field(proto.STRING, number=9, oneof="data",) + sql_script = proto.Field( + proto.MESSAGE, number=100, oneof="content", message=SqlScript, + ) + notebook = proto.Field( + proto.MESSAGE, number=101, oneof="content", message=Notebook, + ) + + +class Session(proto.Message): + r"""Represents an active analyze session running for a user. + + Attributes: + name (str): + Output only. The relative resource name of the content, of + the form: + projects/{project_id}/locations/{location_id}/lakes/{lake_id}/environment/{environment_id}/sessions/{session_id} + user_id (str): + Output only. Email of user running the + session. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Session start time. + state (google.cloud.dataplex_v1.types.State): + + """ + + name = proto.Field(proto.STRING, number=1,) + user_id = proto.Field(proto.STRING, number=2,) + create_time = proto.Field(proto.MESSAGE, number=3, message=timestamp_pb2.Timestamp,) + state = proto.Field(proto.ENUM, number=4, enum=resources.State,) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/content.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/content.py new file mode 100644 index 000000000000..7bb7bb0675f9 --- /dev/null +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/content.py @@ -0,0 +1,167 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import proto # type: ignore + +from google.cloud.dataplex_v1.types import analyze +from google.protobuf import field_mask_pb2 # type: ignore + + +__protobuf__ = proto.module( + package="google.cloud.dataplex.v1", + manifest={ + "CreateContentRequest", + "UpdateContentRequest", + "DeleteContentRequest", + "ListContentRequest", + "ListContentResponse", + "GetContentRequest", + }, +) + + +class CreateContentRequest(proto.Message): + r"""Create content request. + + Attributes: + parent (str): + Required. The resource name of the parent lake: + projects/{project_id}/locations/{location_id}/lakes/{lake_id} + content (google.cloud.dataplex_v1.types.Content): + Required. Content resource. + validate_only (bool): + Optional. Only validate the request, but do + not perform mutations. The default is false. 
+ """ + + parent = proto.Field(proto.STRING, number=1,) + content = proto.Field(proto.MESSAGE, number=2, message=analyze.Content,) + validate_only = proto.Field(proto.BOOL, number=3,) + + +class UpdateContentRequest(proto.Message): + r"""Update content request. + + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Mask of fields to update. + content (google.cloud.dataplex_v1.types.Content): + Required. Update description. Only fields specified in + ``update_mask`` are updated. + validate_only (bool): + Optional. Only validate the request, but do + not perform mutations. The default is false. + """ + + update_mask = proto.Field( + proto.MESSAGE, number=1, message=field_mask_pb2.FieldMask, + ) + content = proto.Field(proto.MESSAGE, number=2, message=analyze.Content,) + validate_only = proto.Field(proto.BOOL, number=3,) + + +class DeleteContentRequest(proto.Message): + r"""Delete content request. + + Attributes: + name (str): + Required. The resource name of the content: + projects/{project_id}/locations/{location_id}/lakes/{lake_id}/content/{content_id} + """ + + name = proto.Field(proto.STRING, number=1,) + + +class ListContentRequest(proto.Message): + r"""List content request. Returns the BASIC Content view. + + Attributes: + parent (str): + Required. The resource name of the parent lake: + projects/{project_id}/locations/{location_id}/lakes/{lake_id} + page_size (int): + Optional. Maximum number of content to + return. The service may return fewer than this + value. If unspecified, at most 10 content will + be returned. The maximum value is 1000; values + above 1000 will be coerced to 1000. + page_token (str): + Optional. Page token received from a previous + ``ListContent`` call. Provide this to retrieve the + subsequent page. When paginating, all other parameters + provided to ``ListContent`` must match the call that + provided the page token. + filter (str): + Optional. Filter request. Filters are case-sensitive. The + following formats are supported: + + labels.key1 = "value1" labels:key1 type = "NOTEBOOK" type = + "SQL_SCRIPT" + + These restrictions can be coinjoined with AND, OR and NOT + conjunctions. + """ + + parent = proto.Field(proto.STRING, number=1,) + page_size = proto.Field(proto.INT32, number=2,) + page_token = proto.Field(proto.STRING, number=3,) + filter = proto.Field(proto.STRING, number=4,) + + +class ListContentResponse(proto.Message): + r"""List content response. + + Attributes: + content (Sequence[google.cloud.dataplex_v1.types.Content]): + Content under the given parent lake. + next_page_token (str): + Token to retrieve the next page of results, + or empty if there are no more results in the + list. + """ + + @property + def raw_page(self): + return self + + content = proto.RepeatedField(proto.MESSAGE, number=1, message=analyze.Content,) + next_page_token = proto.Field(proto.STRING, number=2,) + + +class GetContentRequest(proto.Message): + r"""Get content request. + + Attributes: + name (str): + Required. The resource name of the content: + projects/{project_id}/locations/{location_id}/lakes/{lake_id}/content/{content_id} + view (google.cloud.dataplex_v1.types.GetContentRequest.ContentView): + Optional. Specify content view to make a + partial request. + """ + + class ContentView(proto.Enum): + r"""Specifies whether the request should return the full or the + partial representation. 
+ """ + CONTENT_VIEW_UNSPECIFIED = 0 + BASIC = 1 + FULL = 2 + + name = proto.Field(proto.STRING, number=1,) + view = proto.Field(proto.ENUM, number=2, enum=ContentView,) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/metadata_.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/metadata_.py index ca77bca0e19d..8fe070875667 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/metadata_.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/metadata_.py @@ -22,10 +22,15 @@ package="google.cloud.dataplex.v1", manifest={ "StorageSystem", + "CreateEntityRequest", + "UpdateEntityRequest", + "DeleteEntityRequest", "ListEntitiesRequest", "ListEntitiesResponse", "GetEntityRequest", "ListPartitionsRequest", + "CreatePartitionRequest", + "DeletePartitionRequest", "ListPartitionsResponse", "GetPartitionRequest", "Entity", @@ -43,6 +48,60 @@ class StorageSystem(proto.Enum): BIGQUERY = 2 +class CreateEntityRequest(proto.Message): + r"""Create a metadata entity request. + + Attributes: + parent (str): + Required. The resource name of the parent zone: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}``. + entity (google.cloud.dataplex_v1.types.Entity): + Required. Entity resource. + validate_only (bool): + Optional. Only validate the request, but do + not perform mutations. The default is false. + """ + + parent = proto.Field(proto.STRING, number=1,) + entity = proto.Field(proto.MESSAGE, number=3, message="Entity",) + validate_only = proto.Field(proto.BOOL, number=4,) + + +class UpdateEntityRequest(proto.Message): + r"""Update a metadata entity request. + The exiting entity will be fully replaced by the entity in the + request. The entity ID is mutable. To modify the ID, use the + current entity ID in the request URL and specify the new ID in + the request body. + + Attributes: + entity (google.cloud.dataplex_v1.types.Entity): + Required. Update description. + validate_only (bool): + Optional. Only validate the request, but do + not perform mutations. The default is false. + """ + + entity = proto.Field(proto.MESSAGE, number=2, message="Entity",) + validate_only = proto.Field(proto.BOOL, number=3,) + + +class DeleteEntityRequest(proto.Message): + r"""Delete a metadata entity request. + + Attributes: + name (str): + Required. The resource name of the entity: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{entity_id}``. + etag (str): + Required. The etag associated with the + partition if it was previously retrieved. + """ + + name = proto.Field(proto.STRING, number=1,) + etag = proto.Field(proto.STRING, number=2,) + + class ListEntitiesRequest(proto.Message): r"""List metadata entities request. @@ -56,9 +115,9 @@ class ListEntitiesRequest(proto.Message): page_size (int): Optional. Maximum number of entities to return. The service may return fewer than this - value. If unspecified, at most 10 entities will - be returned. The maximum value is 1000; values - above 1000 are set to 1000. + value. If unspecified, 100 entities will be + returned by default. The maximum value is 500; + larger values will will be truncated to 500. page_token (str): Optional. Page token received from a previous ``ListEntities`` call. Provide this to retrieve the @@ -66,7 +125,15 @@ class ListEntitiesRequest(proto.Message): provided to ``ListEntities`` must match the call that provided the page token. 
filter (str): - Optional. Filter request by name prefix. + Optional. The following filter parameters can be added to + the URL to limit the entities returned by the API: + + - Entity ID: ?filter="id=entityID" + - Asset ID: ?filter="asset=assetID" + - Data path: ?filter="data_path=gs://my-bucket" + - Is HIVE compatible: ?filter="hive_compatible=true" + - Is BigQuery compatible: + ?filter="bigquery_compatible=true" """ class EntityView(proto.Enum): @@ -135,9 +202,10 @@ class ListPartitionsRequest(proto.Message): page_size (int): Optional. Maximum number of partitions to return. The service may return fewer than this - value. If unspecified, at most 10 partitions - will be returned. The maximum value is 1000; - values above 1000 will be coerced to 1000. + value. If unspecified, 100 partitions will be + returned by default. The maximum page size is + 500; larger values will be truncated to + 500. page_token (str): Optional. Page token received from a previous ``ListPartitions`` call. Provide this to retrieve the @@ -145,7 +213,30 @@ provided to ``ListPartitions`` must match the call that provided the page token. filter (str): - Optional. Filter request. + Optional. Filter the partitions returned to the caller using + a key value pair expression. The filter expression supports: + + - logical operators: AND, OR + - comparison operators: <, >, >=, <=, =, != + - LIKE operators: + + - The right hand of a LIKE operator supports "." and "*" + for wildcard searches, for example "value1 LIKE + '.*oo.*'" + + - parenthetical grouping: ( ) + + Sample filter expression: ?filter="key1 < value1 OR key2 > + value2" + + **Notes:** + + - Keys to the left of operators are case insensitive. + - Partition results are sorted first by creation time, then + by lexicographic order. + - Up to 20 key value filter pairs are allowed, but due to + performance considerations, only the first 10 will be + used as a filter. """ parent = proto.Field(proto.STRING, number=1,) @@ -154,6 +245,44 @@ class ListPartitionsRequest(proto.Message): filter = proto.Field(proto.STRING, number=4,) + +class CreatePartitionRequest(proto.Message): + r"""Create metadata partition request. + + Attributes: + parent (str): + Required. The resource name of the parent entity: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{entity_id}``. + partition (google.cloud.dataplex_v1.types.Partition): + Required. Partition resource. + validate_only (bool): + Optional. Only validate the request, but do + not perform mutations. The default is false. + """ + + parent = proto.Field(proto.STRING, number=1,) + partition = proto.Field(proto.MESSAGE, number=3, message="Partition",) + validate_only = proto.Field(proto.BOOL, number=4,) + + +class DeletePartitionRequest(proto.Message): + r"""Delete metadata partition request. + + Attributes: + name (str): + Required. The resource name of the partition. format: + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{entity_id}/partitions/{partition_value_path}``. + The {partition_value_path} segment consists of an ordered + sequence of partition values separated by "/". All values + must be provided. + etag (str): + Optional. The etag associated with the + partition if it was previously retrieved. + """ + + name = proto.Field(proto.STRING, number=1,) + etag = proto.Field(proto.STRING, number=2,) + + class ListPartitionsResponse(proto.Message): r"""List metadata partitions response.
@@ -180,7 +309,10 @@ class GetPartitionRequest(proto.Message): Attributes: name (str): Required. The resource name of the partition: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{entity_id}/partitions/{partition_id}``. + ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{entity_id}/partitions/{partition_value_path}``. + The {partition_value_path} segment consists of an ordered + sequence of partition values separated by "/". All values + must be provided. """ name = proto.Field(proto.STRING, number=1,) @@ -195,10 +327,12 @@ class Entity(proto.Message): Output only. The resource name of the entity, of the form: ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{id}``. display_name (str): - Optional. User friendly display name. + Optional. Display name must be shorter than + or equal to 63 characters. description (str): Optional. User friendly longer description - text. + text. Must be shorter than or equal to 1024 + characters. create_time (google.protobuf.timestamp_pb2.Timestamp): Output only. The time when the entity was created. @@ -210,16 +344,20 @@ mutable, and will be used as the published table name. Specifying a new ID in an update entity request will override the existing value. + The ID must contain only letters (a-z, A-Z), + numbers (0-9), and underscores. Must begin with + a letter. etag (str): - Optional. The etag for this entity. - Required for update requests. It must match the + Optional. The etag for this entity. Required + for update and delete requests. Must match the server's etag. type_ (google.cloud.dataplex_v1.types.Entity.Type): - Required. The type of entity. + Required. Immutable. The type of entity. asset (str): - Required. The name of the asset associated - with the storage location containing the entity - data. + Required. Immutable. The ID of the asset + associated with the storage location containing + the entity data. The entity must be within the + same zone as the asset. data_path (str): Required. Immutable. The storage path of the entity data. For Cloud Storage data, this is the fully-qualified path to @@ -235,8 +373,8 @@ Output only. The name of the associated Data Catalog entry. system (google.cloud.dataplex_v1.types.StorageSystem): - Required. Identifies the storage system of - the entity data. + Required. Immutable. Identifies the storage + system of the entity data. format_ (google.cloud.dataplex_v1.types.StorageFormat): Required. Identifies the storage format of the entity data. It does not apply to entities @@ -318,10 +456,18 @@ class Partition(proto.Message): Attributes: name (str): - Output only. The resource name of the partition, of the - form: - ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{entity_id}/partitions/{partition_id}``. - {partition_id} is a generated unique ID. + Output only. The values must be HTML URL + encoded two times before constructing the path. + For example, if you have a value of "US:CA", + encode it two times to get "US%253ACA". If the + second value is + "CA#Sunnyvale", encode it two times to get + "CA%2523Sunnyvale". The partition values path is + "US%253ACA/CA%2523Sunnyvale". The final URL will + be + "https://.../partitions/US%253ACA/CA%2523Sunnyvale". + The name field in the responses will always have + the encoded format. values (Sequence[str]): Required.
Immutable. The set of values representing the partition, which correspond to @@ -330,11 +476,10 @@ class Partition(proto.Message): location (str): Required. Immutable. The location of the entity data within the partition, for example, - ``gs://bucket/path/to/entity/key1=value1/key2=value2``. + ``gs://bucket/path/to/entity/key1=value1/key2=value2``. Or + ``projects//datasets//tables/`` etag (str): Optional. The etag for this partition. - Required for update requests. It must match the - server's etag. """ name = proto.Field(proto.STRING, number=1,) @@ -349,9 +494,30 @@ class Schema(proto.Message): Attributes: user_managed (bool): - Required. Whether the schema is user-managed - or managed by the service. User-managed schemas - are not automatically updated by discovery jobs. + Required. Whether the schema is user-managed or managed by + the service. + + - Set user_managed to false if you would like Dataplex to + help you manage the schema. You will get the full service + provided by Dataplex discovery, including new data + discovery, schema inference and schema evolution. You can + still provide input on the schema of the entities, for + example renaming a schema field, or changing CSV or JSON + options, if you think the discovered values are not + accurate. Dataplex will consider your input as the + initial schema (as if it were produced by a previous + discovery run), and will evolve the schema or flag actions + based on that. + - Set user_managed to true if you would like to fully manage + the entity schema by yourself. This is useful when you + would like to manually specify the schema for a table. In + this case, the schema defined by the user is guaranteed + to be kept unchanged and would not be overwritten. But + this also means Dataplex will not provide schema + evolution management for you. Dataplex will still be able + to manage partition registration (i.e., keeping the list + of partitions up to date) when Dataplex discovery is + turned on and user_managed is set to true. fields (Sequence[google.cloud.dataplex_v1.types.Schema.SchemaField]): Optional. The sequence of fields describing data in table entities. @@ -403,9 +569,12 @@ class SchemaField(proto.Message): Attributes: name (str): - Required. The name of the field. + Required. The name of the field. The maximum length is 767 + characters. The name must begin with a letter and must not + contain ``:`` or ``.``. description (str): Optional. User friendly field description. + Must be less than or equal to 1024 characters. type_ (google.cloud.dataplex_v1.types.Schema.Type): Required. The type of field. mode (google.cloud.dataplex_v1.types.Schema.Mode): @@ -424,13 +593,17 @@ class PartitionField(proto.Message): r"""Represents a key field within the entity's partition - structure. + structure. You can have up to 20 partition fields, but only + the first 10 can be used for filtering, due to + performance considerations. Attributes: name (str): - Required. The name of the field. + Required. The partition name is editable only + if the partition style is not HIVE compatible. The + maximum length allowed is 767 characters. type_ (google.cloud.dataplex_v1.types.Schema.Type): - Required. The type of field. + Required. Immutable. The type of field. """ name = proto.Field(proto.STRING, number=1,) @@ -458,14 +631,25 @@ class StorageFormat(proto.Message): format_ (google.cloud.dataplex_v1.types.StorageFormat.Format): Output only.
The data format associated with the stored data, which represents content type - values. + values. The value is inferred from the mime type. compression_format (google.cloud.dataplex_v1.types.StorageFormat.CompressionFormat): Optional. The compression type associated with the stored data. If unspecified, the data is uncompressed. mime_type (str): - Required. The mime type descriptor for the data. This field - is valid for formats other than ``UNKNOWN`` and ``MIXED``. + Required. The mime type descriptor for the + data. Must match the pattern {type}/{subtype}. + Supported values: - application/x-parquet + - application/x-avro + - application/x-orc + - application/x-tfrecord + - application/json + - application/{subtypes} + - text/csv + - text/ + - image/{image subtype} + - video/{video subtype} + - audio/{audio subtype} csv (google.cloud.dataplex_v1.types.StorageFormat.CsvOptions): Optional. Additional information about CSV formatted data. @@ -506,18 +690,19 @@ class CsvOptions(proto.Message): Attributes: encoding (str): Optional. The character encoding of the data. - The default is UTF-8. + Accepts "US-ASCII", "UTF-8", and "ISO-8859-1". + Defaults to UTF-8 if unspecified. header_rows (int): Optional. The number of rows to interpret as header rows that should be skipped when reading - data rows. + data rows. Defaults to 0. delimiter (str): Optional. The delimiter used to separate values. Defaults to ','. quote (str): Optional. The character used to quote column - values. Defaults to empty, implying unquoted - data. + values. Accepts '"' (double quotation mark) and + "'" (single quotation mark). Defaults to '"' if + unspecified. """ encoding = proto.Field(proto.STRING, number=1,) @@ -531,7 +716,8 @@ class JsonOptions(proto.Message): Attributes: encoding (str): Optional. The character encoding of the data. - The default is UTF-8. + Accepts "US-ASCII", "UTF-8" and "ISO-8859-1". + Defaults to UTF-8 if not specified. """ encoding = proto.Field(proto.STRING, number=1,) diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/resources.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/resources.py index 471f10ffa04b..72b661f9b637 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/resources.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/resources.py @@ -776,14 +776,7 @@ class ResourceSpec(proto.Message): Immutable. Relative name of the cloud resource that contains the data that is being managed within a lake. For example: ``projects/{project_number}/buckets/{bucket_id}`` - ``projects/{project_number}/datasets/{dataset_id}`` If the - creation policy indicates ATTACH behavior, then an existing - resource must be provided. If the policy indicates CREATE - behavior, new resource will be created with the given - name.However if it is empty, then the resource will be - created using {asset_id}-{UUID} template for name. The - location of the referenced resource must always match that - of the asset. + ``projects/{project_number}/datasets/{dataset_id}`` type_ (google.cloud.dataplex_v1.types.Asset.ResourceSpec.Type): Required. Immutable. Type of resource.
""" diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/service.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/service.py index 788c37fe3473..e9e407775e21 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/service.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/service.py @@ -15,6 +15,7 @@ # import proto # type: ignore +from google.cloud.dataplex_v1.types import analyze from google.cloud.dataplex_v1.types import resources from google.cloud.dataplex_v1.types import tasks as gcd_tasks from google.protobuf import field_mask_pb2 # type: ignore @@ -57,6 +58,14 @@ "ListJobsRequest", "ListJobsResponse", "CancelJobRequest", + "CreateEnvironmentRequest", + "UpdateEnvironmentRequest", + "DeleteEnvironmentRequest", + "ListEnvironmentsRequest", + "ListEnvironmentsResponse", + "GetEnvironmentRequest", + "ListSessionsRequest", + "ListSessionsResponse", }, ) @@ -67,7 +76,7 @@ class CreateLakeRequest(proto.Message): Attributes: parent (str): Required. The resource name of the lake location, of the - form: ``projects/{project_number}/locations/{location_id}`` + form: projects/{project_number}/locations/{location_id} where ``location_id`` refers to a GCP region. lake_id (str): Required. Lake identifier. This ID will be used to generate @@ -775,4 +784,177 @@ class CancelJobRequest(proto.Message): name = proto.Field(proto.STRING, number=1,) +class CreateEnvironmentRequest(proto.Message): + r"""Create environment request. + + Attributes: + parent (str): + Required. The resource name of the parent lake: + projects/{project_id}/locations/{location_id}/lakes/{lake_id} + environment_id (str): + Required. Environment identifier. + + - Must contain only lowercase letters, numbers and hyphens. + - Must start with a letter. + - Must be between 1-63 characters. + - Must end with a number or a letter. + - Must be unique within the lake. + environment (google.cloud.dataplex_v1.types.Environment): + Required. Environment resource. + validate_only (bool): + Optional. Only validate the request, but do + not perform mutations. The default is false. + """ + + parent = proto.Field(proto.STRING, number=1,) + environment_id = proto.Field(proto.STRING, number=2,) + environment = proto.Field(proto.MESSAGE, number=3, message=analyze.Environment,) + validate_only = proto.Field(proto.BOOL, number=4,) + + +class UpdateEnvironmentRequest(proto.Message): + r"""Update environment request. + + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Mask of fields to update. + environment (google.cloud.dataplex_v1.types.Environment): + Required. Update description. Only fields specified in + ``update_mask`` are updated. + validate_only (bool): + Optional. Only validate the request, but do + not perform mutations. The default is false. + """ + + update_mask = proto.Field( + proto.MESSAGE, number=1, message=field_mask_pb2.FieldMask, + ) + environment = proto.Field(proto.MESSAGE, number=2, message=analyze.Environment,) + validate_only = proto.Field(proto.BOOL, number=3,) + + +class DeleteEnvironmentRequest(proto.Message): + r"""Delete environment request. + + Attributes: + name (str): + Required. The resource name of the environment: + projects/{project_id}/locations/{location_id}/lakes/{lake_id}/environments/{environment_id}\` + """ + + name = proto.Field(proto.STRING, number=1,) + + +class ListEnvironmentsRequest(proto.Message): + r"""List environments request. + + Attributes: + parent (str): + Required. 
The resource name of the parent lake: + projects/{project_id}/locations/{location_id}/lakes/{lake_id} + page_size (int): + Optional. Maximum number of environments to + return. The service may return fewer than this + value. If unspecified, at most 10 environments + will be returned. The maximum value is 1000; + values above 1000 will be coerced to 1000. + page_token (str): + Optional. Page token received from a previous + ``ListEnvironments`` call. Provide this to retrieve the + subsequent page. When paginating, all other parameters + provided to ``ListEnvironments`` must match the call that + provided the page token. + filter (str): + Optional. Filter request. + order_by (str): + Optional. Order by fields for the result. + """ + + parent = proto.Field(proto.STRING, number=1,) + page_size = proto.Field(proto.INT32, number=2,) + page_token = proto.Field(proto.STRING, number=3,) + filter = proto.Field(proto.STRING, number=4,) + order_by = proto.Field(proto.STRING, number=5,) + + +class ListEnvironmentsResponse(proto.Message): + r"""List environments response. + + Attributes: + environments (Sequence[google.cloud.dataplex_v1.types.Environment]): + Environments under the given parent lake. + next_page_token (str): + Token to retrieve the next page of results, + or empty if there are no more results in the + list. + """ + + @property + def raw_page(self): + return self + + environments = proto.RepeatedField( + proto.MESSAGE, number=1, message=analyze.Environment, + ) + next_page_token = proto.Field(proto.STRING, number=2,) + + +class GetEnvironmentRequest(proto.Message): + r"""Get environment request. + + Attributes: + name (str): + Required. The resource name of the environment: + projects/{project_id}/locations/{location_id}/lakes/{lake_id}/environments/{environment_id} + """ + + name = proto.Field(proto.STRING, number=1,) + + +class ListSessionsRequest(proto.Message): + r"""List sessions request. + + Attributes: + parent (str): + Required. The resource name of the parent environment: + projects/{project_number}/locations/{location_id}/lakes/{lake_id}/environment/{environment_id} + page_size (int): + Optional. Maximum number of sessions to + return. The service may return fewer than this + value. If unspecified, at most 10 sessions will + be returned. The maximum value is 1000; values + above 1000 will be coerced to 1000. + page_token (str): + Optional. Page token received from a previous + ``ListSessions`` call. Provide this to retrieve the + subsequent page. When paginating, all other parameters + provided to ``ListSessions`` must match the call that + provided the page token. + """ + + parent = proto.Field(proto.STRING, number=1,) + page_size = proto.Field(proto.INT32, number=2,) + page_token = proto.Field(proto.STRING, number=3,) + + +class ListSessionsResponse(proto.Message): + r"""List sessions response. + + Attributes: + sessions (Sequence[google.cloud.dataplex_v1.types.Session]): + Sessions under a given environment. + next_page_token (str): + Token to retrieve the next page of results, + or empty if there are no more results in the + list. 
+ """ + + @property + def raw_page(self): + return self + + sessions = proto.RepeatedField(proto.MESSAGE, number=1, message=analyze.Session,) + next_page_token = proto.Field(proto.STRING, number=2,) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-dataplex/samples/generated_samples/dataplex_generated_dataplex_v1_content_service_create_content_async.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_generated_dataplex_v1_content_service_create_content_async.py new file mode 100644 index 000000000000..187012c8e5ad --- /dev/null +++ b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_generated_dataplex_v1_content_service_create_content_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateContent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_generated_dataplex_v1_ContentService_CreateContent_async] +from google.cloud import dataplex_v1 + + +async def sample_create_content(): + # Create a client + client = dataplex_v1.ContentServiceAsyncClient() + + # Initialize request argument(s) + content = dataplex_v1.Content() + content.data_text = "data_text_value" + content.sql_script.engine = "SPARK" + content.path = "path_value" + + request = dataplex_v1.CreateContentRequest( + parent="parent_value", + content=content, + ) + + # Make the request + response = await client.create_content(request=request) + + # Handle the response + print(response) + +# [END dataplex_generated_dataplex_v1_ContentService_CreateContent_async] diff --git a/packages/google-cloud-dataplex/samples/generated_samples/dataplex_generated_dataplex_v1_content_service_create_content_sync.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_generated_dataplex_v1_content_service_create_content_sync.py new file mode 100644 index 000000000000..7c0280336e83 --- /dev/null +++ b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_generated_dataplex_v1_content_service_create_content_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for CreateContent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_generated_dataplex_v1_ContentService_CreateContent_sync] +from google.cloud import dataplex_v1 + + +def sample_create_content(): + # Create a client + client = dataplex_v1.ContentServiceClient() + + # Initialize request argument(s) + content = dataplex_v1.Content() + content.data_text = "data_text_value" + content.sql_script.engine = "SPARK" + content.path = "path_value" + + request = dataplex_v1.CreateContentRequest( + parent="parent_value", + content=content, + ) + + # Make the request + response = client.create_content(request=request) + + # Handle the response + print(response) + +# [END dataplex_generated_dataplex_v1_ContentService_CreateContent_sync] diff --git a/packages/google-cloud-dataplex/samples/generated_samples/dataplex_generated_dataplex_v1_content_service_delete_content_async.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_generated_dataplex_v1_content_service_delete_content_async.py new file mode 100644 index 000000000000..db3140de0636 --- /dev/null +++ b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_generated_dataplex_v1_content_service_delete_content_async.py @@ -0,0 +1,43 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteContent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_generated_dataplex_v1_ContentService_DeleteContent_async] +from google.cloud import dataplex_v1 + + +async def sample_delete_content(): + # Create a client + client = dataplex_v1.ContentServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteContentRequest( + name="name_value", + ) + + # Make the request + await client.delete_content(request=request) + + +# [END dataplex_generated_dataplex_v1_ContentService_DeleteContent_async] diff --git a/packages/google-cloud-dataplex/samples/generated_samples/dataplex_generated_dataplex_v1_content_service_delete_content_sync.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_generated_dataplex_v1_content_service_delete_content_sync.py new file mode 100644 index 000000000000..26bc04af0c38 --- /dev/null +++ b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_generated_dataplex_v1_content_service_delete_content_sync.py @@ -0,0 +1,43 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteContent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_generated_dataplex_v1_ContentService_DeleteContent_sync] +from google.cloud import dataplex_v1 + + +def sample_delete_content(): + # Create a client + client = dataplex_v1.ContentServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteContentRequest( + name="name_value", + ) + + # Make the request + client.delete_content(request=request) + + +# [END dataplex_generated_dataplex_v1_ContentService_DeleteContent_sync] diff --git a/packages/google-cloud-dataplex/samples/generated_samples/dataplex_generated_dataplex_v1_content_service_get_content_async.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_generated_dataplex_v1_content_service_get_content_async.py new file mode 100644 index 000000000000..5387b2aeedfc --- /dev/null +++ b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_generated_dataplex_v1_content_service_get_content_async.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetContent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_generated_dataplex_v1_ContentService_GetContent_async] +from google.cloud import dataplex_v1 + + +async def sample_get_content(): + # Create a client + client = dataplex_v1.ContentServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GetContentRequest( + name="name_value", + ) + + # Make the request + response = await client.get_content(request=request) + + # Handle the response + print(response) + +# [END dataplex_generated_dataplex_v1_ContentService_GetContent_async] diff --git a/packages/google-cloud-dataplex/samples/generated_samples/dataplex_generated_dataplex_v1_content_service_get_content_sync.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_generated_dataplex_v1_content_service_get_content_sync.py new file mode 100644 index 000000000000..d102f7270129 --- /dev/null +++ b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_generated_dataplex_v1_content_service_get_content_sync.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetContent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_generated_dataplex_v1_ContentService_GetContent_sync] +from google.cloud import dataplex_v1 + + +def sample_get_content(): + # Create a client + client = dataplex_v1.ContentServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.GetContentRequest( + name="name_value", + ) + + # Make the request + response = client.get_content(request=request) + + # Handle the response + print(response) + +# [END dataplex_generated_dataplex_v1_ContentService_GetContent_sync] diff --git a/packages/google-cloud-dataplex/samples/generated_samples/dataplex_generated_dataplex_v1_content_service_list_content_async.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_generated_dataplex_v1_content_service_list_content_async.py new file mode 100644 index 000000000000..7bd8d74ae05f --- /dev/null +++ b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_generated_dataplex_v1_content_service_list_content_async.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListContent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_generated_dataplex_v1_ContentService_ListContent_async] +from google.cloud import dataplex_v1 + + +async def sample_list_content(): + # Create a client + client = dataplex_v1.ContentServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.ListContentRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_content(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END dataplex_generated_dataplex_v1_ContentService_ListContent_async] diff --git a/packages/google-cloud-dataplex/samples/generated_samples/dataplex_generated_dataplex_v1_content_service_list_content_sync.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_generated_dataplex_v1_content_service_list_content_sync.py new file mode 100644 index 000000000000..b8e8d92cf941 --- /dev/null +++ b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_generated_dataplex_v1_content_service_list_content_sync.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListContent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_generated_dataplex_v1_ContentService_ListContent_sync] +from google.cloud import dataplex_v1 + + +def sample_list_content(): + # Create a client + client = dataplex_v1.ContentServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListContentRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_content(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dataplex_generated_dataplex_v1_ContentService_ListContent_sync] diff --git a/packages/google-cloud-dataplex/samples/generated_samples/dataplex_generated_dataplex_v1_content_service_update_content_async.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_generated_dataplex_v1_content_service_update_content_async.py new file mode 100644 index 000000000000..8754f4e56d81 --- /dev/null +++ b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_generated_dataplex_v1_content_service_update_content_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateContent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
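A note on the ListContent samples above: the return value is a pager, and plain iteration (as in the snippets) transparently issues follow-up RPCs using next_page_token. For page-level control, google-api-core pagers also expose a pages iterator. A sketch; the page_size field follows the usual AIP-158 list convention, and the page.content repeated-field name is an assumption not confirmed by this diff:

from google.cloud import dataplex_v1


def list_content_by_page(parent: str) -> None:
    client = dataplex_v1.ContentServiceClient()
    request = dataplex_v1.ListContentRequest(
        parent=parent,
        page_size=50,  # standard list-pagination knob
    )
    pager = client.list_content(request=request)
    for page in pager.pages:  # one RPC per loop iteration
        for item in page.content:  # assumed repeated field on ListContentResponse
            print(item.name)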
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_generated_dataplex_v1_ContentService_UpdateContent_async] +from google.cloud import dataplex_v1 + + +async def sample_update_content(): + # Create a client + client = dataplex_v1.ContentServiceAsyncClient() + + # Initialize request argument(s) + content = dataplex_v1.Content() + content.data_text = "data_text_value" + content.sql_script.engine = "SPARK" + content.path = "path_value" + + request = dataplex_v1.UpdateContentRequest( + content=content, + ) + + # Make the request + response = await client.update_content(request=request) + + # Handle the response + print(response) + +# [END dataplex_generated_dataplex_v1_ContentService_UpdateContent_async] diff --git a/packages/google-cloud-dataplex/samples/generated_samples/dataplex_generated_dataplex_v1_content_service_update_content_sync.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_generated_dataplex_v1_content_service_update_content_sync.py new file mode 100644 index 000000000000..f13efa402ef6 --- /dev/null +++ b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_generated_dataplex_v1_content_service_update_content_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateContent +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_generated_dataplex_v1_ContentService_UpdateContent_sync] +from google.cloud import dataplex_v1 + + +def sample_update_content(): + # Create a client + client = dataplex_v1.ContentServiceClient() + + # Initialize request argument(s) + content = dataplex_v1.Content() + content.data_text = "data_text_value" + content.sql_script.engine = "SPARK" + content.path = "path_value" + + request = dataplex_v1.UpdateContentRequest( + content=content, + ) + + # Make the request + response = client.update_content(request=request) + + # Handle the response + print(response) + +# [END dataplex_generated_dataplex_v1_ContentService_UpdateContent_sync] diff --git a/packages/google-cloud-dataplex/samples/generated_samples/dataplex_generated_dataplex_v1_dataplex_service_create_environment_async.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_generated_dataplex_v1_dataplex_service_create_environment_async.py new file mode 100644 index 000000000000..88af5ce40091 --- /dev/null +++ b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_generated_dataplex_v1_dataplex_service_create_environment_async.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateEnvironment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
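One caveat on the UpdateContent samples above: Update RPCs in this API family normally take an update_mask naming the fields to change, and UpdateContentRequest carries that field, but the generated snippets leave it unset. A sketch of a masked update; the mask path and SQL text are illustrative only:

from google.cloud import dataplex_v1
from google.protobuf import field_mask_pb2


def update_content_sql(name: str) -> dataplex_v1.Content:
    client = dataplex_v1.ContentServiceClient()

    content = dataplex_v1.Content()
    content.name = name  # existing Content resource to patch
    content.data_text = "SELECT 1"

    request = dataplex_v1.UpdateContentRequest(
        content=content,
        update_mask=field_mask_pb2.FieldMask(paths=["data_text"]),
    )
    return client.update_content(request=request)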
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_generated_dataplex_v1_DataplexService_CreateEnvironment_async]
+from google.cloud import dataplex_v1
+
+
+async def sample_create_environment():
+    # Create a client
+    client = dataplex_v1.DataplexServiceAsyncClient()
+
+    # Initialize request argument(s)
+    environment = dataplex_v1.Environment()
+    environment.infrastructure_spec.os_image.image_version = "image_version_value"
+
+    request = dataplex_v1.CreateEnvironmentRequest(
+        parent="parent_value",
+        environment_id="environment_id_value",
+        environment=environment,
+    )
+
+    # Make the request (the async client call must be awaited to obtain the operation)
+    operation = await client.create_environment(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END dataplex_generated_dataplex_v1_DataplexService_CreateEnvironment_async]
diff --git a/packages/google-cloud-dataplex/samples/generated_samples/dataplex_generated_dataplex_v1_dataplex_service_create_environment_sync.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_generated_dataplex_v1_dataplex_service_create_environment_sync.py
new file mode 100644
index 000000000000..eff36a325eb6
--- /dev/null
+++ b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_generated_dataplex_v1_dataplex_service_create_environment_sync.py
@@ -0,0 +1,54 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for CreateEnvironment
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_generated_dataplex_v1_DataplexService_CreateEnvironment_sync] +from google.cloud import dataplex_v1 + + +def sample_create_environment(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + environment = dataplex_v1.Environment() + environment.infrastructure_spec.os_image.image_version = "image_version_value" + + request = dataplex_v1.CreateEnvironmentRequest( + parent="parent_value", + environment_id="environment_id_value", + environment=environment, + ) + + # Make the request + operation = client.create_environment(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dataplex_generated_dataplex_v1_DataplexService_CreateEnvironment_sync] diff --git a/packages/google-cloud-dataplex/samples/generated_samples/dataplex_generated_dataplex_v1_dataplex_service_delete_environment_async.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_generated_dataplex_v1_dataplex_service_delete_environment_async.py new file mode 100644 index 000000000000..451fb47eef4c --- /dev/null +++ b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_generated_dataplex_v1_dataplex_service_delete_environment_async.py @@ -0,0 +1,49 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteEnvironment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
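The CreateEnvironment samples return a long-running operation, and operation.result() blocks until the server finishes. In practice a timeout is usually wise. A sketch against the standard google-api-core operation surface; the 600-second value is illustrative:

from google.cloud import dataplex_v1


def create_environment_blocking(parent: str, environment_id: str):
    client = dataplex_v1.DataplexServiceClient()

    environment = dataplex_v1.Environment()
    environment.infrastructure_spec.os_image.image_version = "image_version_value"

    operation = client.create_environment(
        request=dataplex_v1.CreateEnvironmentRequest(
            parent=parent,
            environment_id=environment_id,
            environment=environment,
        )
    )

    # While polling, operation.metadata carries a dataplex_v1.OperationMetadata.
    # result() re-raises any server-side failure as an exception.
    return operation.result(timeout=600)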
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_generated_dataplex_v1_DataplexService_DeleteEnvironment_async]
+from google.cloud import dataplex_v1
+
+
+async def sample_delete_environment():
+    # Create a client
+    client = dataplex_v1.DataplexServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.DeleteEnvironmentRequest(
+        name="name_value",
+    )
+
+    # Make the request (the async client call must be awaited to obtain the operation)
+    operation = await client.delete_environment(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END dataplex_generated_dataplex_v1_DataplexService_DeleteEnvironment_async]
diff --git a/packages/google-cloud-dataplex/samples/generated_samples/dataplex_generated_dataplex_v1_dataplex_service_delete_environment_sync.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_generated_dataplex_v1_dataplex_service_delete_environment_sync.py
new file mode 100644
index 000000000000..8904beadd077
--- /dev/null
+++ b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_generated_dataplex_v1_dataplex_service_delete_environment_sync.py
@@ -0,0 +1,49 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for DeleteEnvironment
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_generated_dataplex_v1_DataplexService_DeleteEnvironment_sync]
+from google.cloud import dataplex_v1
+
+
+def sample_delete_environment():
+    # Create a client
+    client = dataplex_v1.DataplexServiceClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.DeleteEnvironmentRequest(
+        name="name_value",
+    )
+
+    # Make the request
+    operation = client.delete_environment(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END dataplex_generated_dataplex_v1_DataplexService_DeleteEnvironment_sync]
diff --git a/packages/google-cloud-dataplex/samples/generated_samples/dataplex_generated_dataplex_v1_dataplex_service_get_environment_async.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_generated_dataplex_v1_dataplex_service_get_environment_async.py
new file mode 100644
index 000000000000..35a4500cd4e1
--- /dev/null
+++ b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_generated_dataplex_v1_dataplex_service_get_environment_async.py
@@ -0,0 +1,45 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetEnvironment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_generated_dataplex_v1_DataplexService_GetEnvironment_async] +from google.cloud import dataplex_v1 + + +async def sample_get_environment(): + # Create a client + client = dataplex_v1.DataplexServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GetEnvironmentRequest( + name="name_value", + ) + + # Make the request + response = await client.get_environment(request=request) + + # Handle the response + print(response) + +# [END dataplex_generated_dataplex_v1_DataplexService_GetEnvironment_async] diff --git a/packages/google-cloud-dataplex/samples/generated_samples/dataplex_generated_dataplex_v1_dataplex_service_get_environment_sync.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_generated_dataplex_v1_dataplex_service_get_environment_sync.py new file mode 100644 index 000000000000..eb996ee3eaa9 --- /dev/null +++ b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_generated_dataplex_v1_dataplex_service_get_environment_sync.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetEnvironment +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_generated_dataplex_v1_DataplexService_GetEnvironment_sync]
+from google.cloud import dataplex_v1
+
+
+def sample_get_environment():
+    # Create a client
+    client = dataplex_v1.DataplexServiceClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.GetEnvironmentRequest(
+        name="name_value",
+    )
+
+    # Make the request
+    response = client.get_environment(request=request)
+
+    # Handle the response
+    print(response)
+
+# [END dataplex_generated_dataplex_v1_DataplexService_GetEnvironment_sync]
diff --git a/packages/google-cloud-dataplex/samples/generated_samples/dataplex_generated_dataplex_v1_dataplex_service_list_environments_async.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_generated_dataplex_v1_dataplex_service_list_environments_async.py
new file mode 100644
index 000000000000..cf8d0f64ab77
--- /dev/null
+++ b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_generated_dataplex_v1_dataplex_service_list_environments_async.py
@@ -0,0 +1,46 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListEnvironments
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_generated_dataplex_v1_DataplexService_ListEnvironments_async]
+from google.cloud import dataplex_v1
+
+
+async def sample_list_environments():
+    # Create a client
+    client = dataplex_v1.DataplexServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.ListEnvironmentsRequest(
+        parent="parent_value",
+    )
+
+    # Make the request (the async client call must be awaited to obtain the pager)
+    page_result = await client.list_environments(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END dataplex_generated_dataplex_v1_DataplexService_ListEnvironments_async]
diff --git a/packages/google-cloud-dataplex/samples/generated_samples/dataplex_generated_dataplex_v1_dataplex_service_list_environments_sync.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_generated_dataplex_v1_dataplex_service_list_environments_sync.py
new file mode 100644
index 000000000000..eb81948813c9
--- /dev/null
+++ b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_generated_dataplex_v1_dataplex_service_list_environments_sync.py
@@ -0,0 +1,46 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListEnvironments +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_generated_dataplex_v1_DataplexService_ListEnvironments_sync] +from google.cloud import dataplex_v1 + + +def sample_list_environments(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListEnvironmentsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_environments(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dataplex_generated_dataplex_v1_DataplexService_ListEnvironments_sync] diff --git a/packages/google-cloud-dataplex/samples/generated_samples/dataplex_generated_dataplex_v1_dataplex_service_list_sessions_async.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_generated_dataplex_v1_dataplex_service_list_sessions_async.py new file mode 100644 index 000000000000..354eb7492cc3 --- /dev/null +++ b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_generated_dataplex_v1_dataplex_service_list_sessions_async.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListSessions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
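Ahead of the ListSessions samples that follow: sessions hang off an environment rather than a lake, so the parent there is expected to be a full environment resource name. The path shape below is an assumption inferred from the resource hierarchy in this change, not something stated in the diff:

from google.cloud import dataplex_v1


def list_environment_sessions(project: str, location: str, lake: str, environment: str) -> None:
    client = dataplex_v1.DataplexServiceClient()
    # Assumed shape: projects/{p}/locations/{l}/lakes/{lake}/environments/{env}
    parent = (
        f"projects/{project}/locations/{location}"
        f"/lakes/{lake}/environments/{environment}"
    )
    request = dataplex_v1.ListSessionsRequest(parent=parent)
    for session in client.list_sessions(request=request):
        print(session.name)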
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_generated_dataplex_v1_DataplexService_ListSessions_async]
+from google.cloud import dataplex_v1
+
+
+async def sample_list_sessions():
+    # Create a client
+    client = dataplex_v1.DataplexServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.ListSessionsRequest(
+        parent="parent_value",
+    )
+
+    # Make the request (the async client call must be awaited to obtain the pager)
+    page_result = await client.list_sessions(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END dataplex_generated_dataplex_v1_DataplexService_ListSessions_async]
diff --git a/packages/google-cloud-dataplex/samples/generated_samples/dataplex_generated_dataplex_v1_dataplex_service_list_sessions_sync.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_generated_dataplex_v1_dataplex_service_list_sessions_sync.py
new file mode 100644
index 000000000000..a7b79a38dc98
--- /dev/null
+++ b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_generated_dataplex_v1_dataplex_service_list_sessions_sync.py
@@ -0,0 +1,46 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListSessions
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_generated_dataplex_v1_DataplexService_ListSessions_sync]
+from google.cloud import dataplex_v1
+
+
+def sample_list_sessions():
+    # Create a client
+    client = dataplex_v1.DataplexServiceClient()
+
+    # Initialize request argument(s)
+    request = dataplex_v1.ListSessionsRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = client.list_sessions(request=request)
+
+    # Handle the response
+    for response in page_result:
+        print(response)
+
+# [END dataplex_generated_dataplex_v1_DataplexService_ListSessions_sync]
diff --git a/packages/google-cloud-dataplex/samples/generated_samples/dataplex_generated_dataplex_v1_dataplex_service_update_environment_async.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_generated_dataplex_v1_dataplex_service_update_environment_async.py
new file mode 100644
index 000000000000..87c4c64a031a
--- /dev/null
+++ b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_generated_dataplex_v1_dataplex_service_update_environment_async.py
@@ -0,0 +1,52 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for UpdateEnvironment
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-dataplex
+
+
+# [START dataplex_generated_dataplex_v1_DataplexService_UpdateEnvironment_async]
+from google.cloud import dataplex_v1
+
+
+async def sample_update_environment():
+    # Create a client
+    client = dataplex_v1.DataplexServiceAsyncClient()
+
+    # Initialize request argument(s)
+    environment = dataplex_v1.Environment()
+    environment.infrastructure_spec.os_image.image_version = "image_version_value"
+
+    request = dataplex_v1.UpdateEnvironmentRequest(
+        environment=environment,
+    )
+
+    # Make the request (the async client call must be awaited to obtain the operation)
+    operation = await client.update_environment(request=request)
+
+    print("Waiting for operation to complete...")
+
+    response = await operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END dataplex_generated_dataplex_v1_DataplexService_UpdateEnvironment_async]
diff --git a/packages/google-cloud-dataplex/samples/generated_samples/dataplex_generated_dataplex_v1_dataplex_service_update_environment_sync.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_generated_dataplex_v1_dataplex_service_update_environment_sync.py
new file mode 100644
index 000000000000..9e22902a6022
--- /dev/null
+++ b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_generated_dataplex_v1_dataplex_service_update_environment_sync.py
@@ -0,0 +1,52 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for UpdateEnvironment
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_generated_dataplex_v1_DataplexService_UpdateEnvironment_sync] +from google.cloud import dataplex_v1 + + +def sample_update_environment(): + # Create a client + client = dataplex_v1.DataplexServiceClient() + + # Initialize request argument(s) + environment = dataplex_v1.Environment() + environment.infrastructure_spec.os_image.image_version = "image_version_value" + + request = dataplex_v1.UpdateEnvironmentRequest( + environment=environment, + ) + + # Make the request + operation = client.update_environment(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dataplex_generated_dataplex_v1_DataplexService_UpdateEnvironment_sync] diff --git a/packages/google-cloud-dataplex/samples/generated_samples/dataplex_generated_dataplex_v1_metadata_service_create_entity_async.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_generated_dataplex_v1_metadata_service_create_entity_async.py new file mode 100644 index 000000000000..b2071cd4e95e --- /dev/null +++ b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_generated_dataplex_v1_metadata_service_create_entity_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateEntity +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_generated_dataplex_v1_MetadataService_CreateEntity_async] +from google.cloud import dataplex_v1 + + +async def sample_create_entity(): + # Create a client + client = dataplex_v1.MetadataServiceAsyncClient() + + # Initialize request argument(s) + entity = dataplex_v1.Entity() + entity.id = "id_value" + entity.type_ = "FILESET" + entity.asset = "asset_value" + entity.data_path = "data_path_value" + entity.system = "BIGQUERY" + entity.format_.mime_type = "mime_type_value" + entity.schema.user_managed = True + + request = dataplex_v1.CreateEntityRequest( + parent="parent_value", + entity=entity, + ) + + # Make the request + response = await client.create_entity(request=request) + + # Handle the response + print(response) + +# [END dataplex_generated_dataplex_v1_MetadataService_CreateEntity_async] diff --git a/packages/google-cloud-dataplex/samples/generated_samples/dataplex_generated_dataplex_v1_metadata_service_create_entity_sync.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_generated_dataplex_v1_metadata_service_create_entity_sync.py new file mode 100644 index 000000000000..f5ac22741066 --- /dev/null +++ b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_generated_dataplex_v1_metadata_service_create_entity_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateEntity +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_generated_dataplex_v1_MetadataService_CreateEntity_sync] +from google.cloud import dataplex_v1 + + +def sample_create_entity(): + # Create a client + client = dataplex_v1.MetadataServiceClient() + + # Initialize request argument(s) + entity = dataplex_v1.Entity() + entity.id = "id_value" + entity.type_ = "FILESET" + entity.asset = "asset_value" + entity.data_path = "data_path_value" + entity.system = "BIGQUERY" + entity.format_.mime_type = "mime_type_value" + entity.schema.user_managed = True + + request = dataplex_v1.CreateEntityRequest( + parent="parent_value", + entity=entity, + ) + + # Make the request + response = client.create_entity(request=request) + + # Handle the response + print(response) + +# [END dataplex_generated_dataplex_v1_MetadataService_CreateEntity_sync] diff --git a/packages/google-cloud-dataplex/samples/generated_samples/dataplex_generated_dataplex_v1_metadata_service_create_partition_async.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_generated_dataplex_v1_metadata_service_create_partition_async.py new file mode 100644 index 000000000000..86d89e2b7cbc --- /dev/null +++ b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_generated_dataplex_v1_metadata_service_create_partition_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreatePartition +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
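The CreateEntity samples above only flag the schema as user-managed; a user-managed schema would normally also declare its columns. A sketch of attaching one field, assuming the nested Schema.SchemaField message and its Type/Mode enums follow the shapes suggested by the proto (names inferred, not confirmed by this diff):

from google.cloud import dataplex_v1


def build_entity_with_schema() -> dataplex_v1.Entity:
    entity = dataplex_v1.Entity()
    entity.id = "id_value"
    entity.type_ = "FILESET"
    entity.asset = "asset_value"
    entity.data_path = "data_path_value"
    entity.system = "BIGQUERY"
    entity.format_.mime_type = "mime_type_value"
    entity.schema.user_managed = True

    # Assumed nested names for a single required string column.
    field = dataplex_v1.Schema.SchemaField(
        name="event_id",
        type_=dataplex_v1.Schema.Type.STRING,
        mode=dataplex_v1.Schema.Mode.REQUIRED,
    )
    entity.schema.fields.append(field)
    return entity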
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_generated_dataplex_v1_MetadataService_CreatePartition_async] +from google.cloud import dataplex_v1 + + +async def sample_create_partition(): + # Create a client + client = dataplex_v1.MetadataServiceAsyncClient() + + # Initialize request argument(s) + partition = dataplex_v1.Partition() + partition.values = ['values_value_1', 'values_value_2'] + partition.location = "location_value" + + request = dataplex_v1.CreatePartitionRequest( + parent="parent_value", + partition=partition, + ) + + # Make the request + response = await client.create_partition(request=request) + + # Handle the response + print(response) + +# [END dataplex_generated_dataplex_v1_MetadataService_CreatePartition_async] diff --git a/packages/google-cloud-dataplex/samples/generated_samples/dataplex_generated_dataplex_v1_metadata_service_create_partition_sync.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_generated_dataplex_v1_metadata_service_create_partition_sync.py new file mode 100644 index 000000000000..dd50bdc041fd --- /dev/null +++ b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_generated_dataplex_v1_metadata_service_create_partition_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreatePartition +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_generated_dataplex_v1_MetadataService_CreatePartition_sync] +from google.cloud import dataplex_v1 + + +def sample_create_partition(): + # Create a client + client = dataplex_v1.MetadataServiceClient() + + # Initialize request argument(s) + partition = dataplex_v1.Partition() + partition.values = ['values_value_1', 'values_value_2'] + partition.location = "location_value" + + request = dataplex_v1.CreatePartitionRequest( + parent="parent_value", + partition=partition, + ) + + # Make the request + response = client.create_partition(request=request) + + # Handle the response + print(response) + +# [END dataplex_generated_dataplex_v1_MetadataService_CreatePartition_sync] diff --git a/packages/google-cloud-dataplex/samples/generated_samples/dataplex_generated_dataplex_v1_metadata_service_delete_entity_async.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_generated_dataplex_v1_metadata_service_delete_entity_async.py new file mode 100644 index 000000000000..2a7ebabb5b5e --- /dev/null +++ b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_generated_dataplex_v1_metadata_service_delete_entity_async.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteEntity +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_generated_dataplex_v1_MetadataService_DeleteEntity_async] +from google.cloud import dataplex_v1 + + +async def sample_delete_entity(): + # Create a client + client = dataplex_v1.MetadataServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteEntityRequest( + name="name_value", + etag="etag_value", + ) + + # Make the request + await client.delete_entity(request=request) + + +# [END dataplex_generated_dataplex_v1_MetadataService_DeleteEntity_async] diff --git a/packages/google-cloud-dataplex/samples/generated_samples/dataplex_generated_dataplex_v1_metadata_service_delete_entity_sync.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_generated_dataplex_v1_metadata_service_delete_entity_sync.py new file mode 100644 index 000000000000..b09736e31dda --- /dev/null +++ b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_generated_dataplex_v1_metadata_service_delete_entity_sync.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteEntity +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_generated_dataplex_v1_MetadataService_DeleteEntity_sync] +from google.cloud import dataplex_v1 + + +def sample_delete_entity(): + # Create a client + client = dataplex_v1.MetadataServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteEntityRequest( + name="name_value", + etag="etag_value", + ) + + # Make the request + client.delete_entity(request=request) + + +# [END dataplex_generated_dataplex_v1_MetadataService_DeleteEntity_sync] diff --git a/packages/google-cloud-dataplex/samples/generated_samples/dataplex_generated_dataplex_v1_metadata_service_delete_partition_async.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_generated_dataplex_v1_metadata_service_delete_partition_async.py new file mode 100644 index 000000000000..4b6df8247da9 --- /dev/null +++ b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_generated_dataplex_v1_metadata_service_delete_partition_async.py @@ -0,0 +1,43 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeletePartition +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
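On the DeleteEntity samples above: the request carries an etag for optimistic concurrency, so the hard-coded "etag_value" placeholder will be rejected by the service. The usual pattern is to read the entity first and echo back its current etag; a sketch:

from google.cloud import dataplex_v1


def delete_entity_safely(name: str) -> None:
    client = dataplex_v1.MetadataServiceClient()

    # Fetch the current revision so its etag can be passed back.
    entity = client.get_entity(request=dataplex_v1.GetEntityRequest(name=name))

    client.delete_entity(
        request=dataplex_v1.DeleteEntityRequest(
            name=name,
            etag=entity.etag,
        )
    )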
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_generated_dataplex_v1_MetadataService_DeletePartition_async] +from google.cloud import dataplex_v1 + + +async def sample_delete_partition(): + # Create a client + client = dataplex_v1.MetadataServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.DeletePartitionRequest( + name="name_value", + ) + + # Make the request + await client.delete_partition(request=request) + + +# [END dataplex_generated_dataplex_v1_MetadataService_DeletePartition_async] diff --git a/packages/google-cloud-dataplex/samples/generated_samples/dataplex_generated_dataplex_v1_metadata_service_delete_partition_sync.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_generated_dataplex_v1_metadata_service_delete_partition_sync.py new file mode 100644 index 000000000000..7ef85aa54231 --- /dev/null +++ b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_generated_dataplex_v1_metadata_service_delete_partition_sync.py @@ -0,0 +1,43 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeletePartition +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_generated_dataplex_v1_MetadataService_DeletePartition_sync] +from google.cloud import dataplex_v1 + + +def sample_delete_partition(): + # Create a client + client = dataplex_v1.MetadataServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.DeletePartitionRequest( + name="name_value", + ) + + # Make the request + client.delete_partition(request=request) + + +# [END dataplex_generated_dataplex_v1_MetadataService_DeletePartition_sync] diff --git a/packages/google-cloud-dataplex/samples/generated_samples/dataplex_generated_dataplex_v1_metadata_service_update_entity_async.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_generated_dataplex_v1_metadata_service_update_entity_async.py new file mode 100644 index 000000000000..922bcbfbb3bd --- /dev/null +++ b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_generated_dataplex_v1_metadata_service_update_entity_async.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateEntity +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_generated_dataplex_v1_MetadataService_UpdateEntity_async] +from google.cloud import dataplex_v1 + + +async def sample_update_entity(): + # Create a client + client = dataplex_v1.MetadataServiceAsyncClient() + + # Initialize request argument(s) + entity = dataplex_v1.Entity() + entity.id = "id_value" + entity.type_ = "FILESET" + entity.asset = "asset_value" + entity.data_path = "data_path_value" + entity.system = "BIGQUERY" + entity.format_.mime_type = "mime_type_value" + entity.schema.user_managed = True + + request = dataplex_v1.UpdateEntityRequest( + entity=entity, + ) + + # Make the request + response = await client.update_entity(request=request) + + # Handle the response + print(response) + +# [END dataplex_generated_dataplex_v1_MetadataService_UpdateEntity_async] diff --git a/packages/google-cloud-dataplex/samples/generated_samples/dataplex_generated_dataplex_v1_metadata_service_update_entity_sync.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_generated_dataplex_v1_metadata_service_update_entity_sync.py new file mode 100644 index 000000000000..27cff416e134 --- /dev/null +++ b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_generated_dataplex_v1_metadata_service_update_entity_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateEntity +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
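A last note on the entity samples (async above, sync below): UpdateEntityRequest takes the full replacement entity rather than a field mask, so the entity's name, and in practice its current etag, should identify the resource being replaced. The metadata requests in this API also appear to offer a dry-run switch; validate_only below is an assumption based on the Dataplex metadata proto, not shown in this diff:

from google.cloud import dataplex_v1


def validate_entity_update(entity: dataplex_v1.Entity) -> dataplex_v1.Entity:
    client = dataplex_v1.MetadataServiceClient()
    request = dataplex_v1.UpdateEntityRequest(
        entity=entity,
        validate_only=True,  # assumed dry-run flag: checks the update without applying it
    )
    return client.update_entity(request=request)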
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_generated_dataplex_v1_MetadataService_UpdateEntity_sync] +from google.cloud import dataplex_v1 + + +def sample_update_entity(): + # Create a client + client = dataplex_v1.MetadataServiceClient() + + # Initialize request argument(s) + entity = dataplex_v1.Entity() + entity.id = "id_value" + entity.type_ = "FILESET" + entity.asset = "asset_value" + entity.data_path = "data_path_value" + entity.system = "BIGQUERY" + entity.format_.mime_type = "mime_type_value" + entity.schema.user_managed = True + + request = dataplex_v1.UpdateEntityRequest( + entity=entity, + ) + + # Make the request + response = client.update_entity(request=request) + + # Handle the response + print(response) + +# [END dataplex_generated_dataplex_v1_MetadataService_UpdateEntity_sync] diff --git a/packages/google-cloud-dataplex/samples/generated_samples/snippet_metadata_dataplex_v1.json b/packages/google-cloud-dataplex/samples/generated_samples/snippet_metadata_dataplex_v1.json index 72afed9a644b..a58cc1b03a29 100644 --- a/packages/google-cloud-dataplex/samples/generated_samples/snippet_metadata_dataplex_v1.json +++ b/packages/google-cloud-dataplex/samples/generated_samples/snippet_metadata_dataplex_v1.json @@ -5,13 +5,102 @@ "async": true, "method": { "service": { - "shortName": "DataplexService" + "shortName": "ContentService" }, - "shortName": "CancelJob" + "shortName": "CreateContent" } }, - "file": "dataplex_generated_dataplex_v1_dataplex_service_cancel_job_async.py", - "regionTag": "dataplex_generated_dataplex_v1_DataplexService_CancelJob_async", + "file": "dataplex_generated_dataplex_v1_content_service_create_content_async.py", + "regionTag": "dataplex_generated_dataplex_v1_ContentService_CreateContent_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "ContentService" + }, + "shortName": "CreateContent" + } + }, + "file": "dataplex_generated_dataplex_v1_content_service_create_content_sync.py", + "regionTag": "dataplex_generated_dataplex_v1_ContentService_CreateContent_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "ContentService" + }, + "shortName": "DeleteContent" + } + }, + "file": "dataplex_generated_dataplex_v1_content_service_delete_content_async.py", + "regionTag": "dataplex_generated_dataplex_v1_ContentService_DeleteContent_async", "segments": [ { "end": 42, @@ -47,13 +136,13 @@ "clientMethod": { "method": { "service": { - "shortName": "DataplexService" + "shortName": "ContentService" }, - "shortName": "CancelJob" + "shortName": "DeleteContent" } }, - "file": 
"dataplex_generated_dataplex_v1_dataplex_service_cancel_job_sync.py", - "regionTag": "dataplex_generated_dataplex_v1_DataplexService_CancelJob_sync", + "file": "dataplex_generated_dataplex_v1_content_service_delete_content_sync.py", + "regionTag": "dataplex_generated_dataplex_v1_ContentService_DeleteContent_sync", "segments": [ { "end": 42, @@ -90,21 +179,21 @@ "async": true, "method": { "service": { - "shortName": "DataplexService" + "shortName": "ContentService" }, - "shortName": "CreateAsset" + "shortName": "GetContent" } }, - "file": "dataplex_generated_dataplex_v1_dataplex_service_create_asset_async.py", - "regionTag": "dataplex_generated_dataplex_v1_DataplexService_CreateAsset_async", + "file": "dataplex_generated_dataplex_v1_content_service_get_content_async.py", + "regionTag": "dataplex_generated_dataplex_v1_ContentService_GetContent_async", "segments": [ { - "end": 53, + "end": 44, "start": 27, "type": "FULL" }, { - "end": 53, + "end": 44, "start": 27, "type": "SHORT" }, @@ -114,18 +203,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 43, + "end": 38, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "end": 50, - "start": 44, + "end": 41, + "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 54, - "start": 51, + "end": 45, + "start": 42, "type": "RESPONSE_HANDLING" } ] @@ -134,21 +223,21 @@ "clientMethod": { "method": { "service": { - "shortName": "DataplexService" + "shortName": "ContentService" }, - "shortName": "CreateAsset" + "shortName": "GetContent" } }, - "file": "dataplex_generated_dataplex_v1_dataplex_service_create_asset_sync.py", - "regionTag": "dataplex_generated_dataplex_v1_DataplexService_CreateAsset_sync", + "file": "dataplex_generated_dataplex_v1_content_service_get_content_sync.py", + "regionTag": "dataplex_generated_dataplex_v1_ContentService_GetContent_sync", "segments": [ { - "end": 53, + "end": 44, "start": 27, "type": "FULL" }, { - "end": 53, + "end": 44, "start": 27, "type": "SHORT" }, @@ -158,18 +247,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 43, + "end": 38, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "end": 50, - "start": 44, + "end": 41, + "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 54, - "start": 51, + "end": 45, + "start": 42, "type": "RESPONSE_HANDLING" } ] @@ -179,13 +268,102 @@ "async": true, "method": { "service": { - "shortName": "DataplexService" + "shortName": "ContentService" }, - "shortName": "CreateLake" + "shortName": "ListContent" } }, - "file": "dataplex_generated_dataplex_v1_dataplex_service_create_lake_async.py", - "regionTag": "dataplex_generated_dataplex_v1_DataplexService_CreateLake_async", + "file": "dataplex_generated_dataplex_v1_content_service_list_content_async.py", + "regionTag": "dataplex_generated_dataplex_v1_ContentService_ListContent_async", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "ContentService" + }, + "shortName": "ListContent" + } + }, + "file": "dataplex_generated_dataplex_v1_content_service_list_content_sync.py", + "regionTag": "dataplex_generated_dataplex_v1_ContentService_ListContent_sync", + "segments": [ + { + "end": 45, + "start": 27, + "type": 
"FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "ContentService" + }, + "shortName": "UpdateContent" + } + }, + "file": "dataplex_generated_dataplex_v1_content_service_update_content_async.py", + "regionTag": "dataplex_generated_dataplex_v1_ContentService_UpdateContent_async", "segments": [ { "end": 49, @@ -203,13 +381,13 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 39, + "end": 43, "start": 34, "type": "REQUEST_INITIALIZATION" }, { "end": 46, - "start": 40, + "start": 44, "type": "REQUEST_EXECUTION" }, { @@ -223,13 +401,13 @@ "clientMethod": { "method": { "service": { - "shortName": "DataplexService" + "shortName": "ContentService" }, - "shortName": "CreateLake" + "shortName": "UpdateContent" } }, - "file": "dataplex_generated_dataplex_v1_dataplex_service_create_lake_sync.py", - "regionTag": "dataplex_generated_dataplex_v1_DataplexService_CreateLake_sync", + "file": "dataplex_generated_dataplex_v1_content_service_update_content_sync.py", + "regionTag": "dataplex_generated_dataplex_v1_ContentService_UpdateContent_sync", "segments": [ { "end": 49, @@ -247,13 +425,13 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 39, + "end": 43, "start": 34, "type": "REQUEST_INITIALIZATION" }, { "end": 46, - "start": 40, + "start": 44, "type": "REQUEST_EXECUTION" }, { @@ -270,19 +448,1172 @@ "service": { "shortName": "DataplexService" }, - "shortName": "CreateTask" + "shortName": "CancelJob" } }, - "file": "dataplex_generated_dataplex_v1_dataplex_service_create_task_async.py", - "regionTag": "dataplex_generated_dataplex_v1_DataplexService_CreateTask_async", + "file": "dataplex_generated_dataplex_v1_dataplex_service_cancel_job_async.py", + "regionTag": "dataplex_generated_dataplex_v1_DataplexService_CancelJob_async", + "segments": [ + { + "end": 42, + "start": 27, + "type": "FULL" + }, + { + "end": 42, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 43, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "DataplexService" + }, + "shortName": "CancelJob" + } + }, + "file": "dataplex_generated_dataplex_v1_dataplex_service_cancel_job_sync.py", + "regionTag": "dataplex_generated_dataplex_v1_DataplexService_CancelJob_sync", + "segments": [ + { + "end": 42, + "start": 27, + "type": "FULL" + }, + { + "end": 42, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 43, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "DataplexService" + }, + "shortName": "CreateAsset" + } + }, + "file": "dataplex_generated_dataplex_v1_dataplex_service_create_asset_async.py", + "regionTag": "dataplex_generated_dataplex_v1_DataplexService_CreateAsset_async", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + 
}, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 43, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 44, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "DataplexService" + }, + "shortName": "CreateAsset" + } + }, + "file": "dataplex_generated_dataplex_v1_dataplex_service_create_asset_sync.py", + "regionTag": "dataplex_generated_dataplex_v1_DataplexService_CreateAsset_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 43, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 44, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "DataplexService" + }, + "shortName": "CreateEnvironment" + } + }, + "file": "dataplex_generated_dataplex_v1_dataplex_service_create_environment_async.py", + "regionTag": "dataplex_generated_dataplex_v1_DataplexService_CreateEnvironment_async", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 43, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 44, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "DataplexService" + }, + "shortName": "CreateEnvironment" + } + }, + "file": "dataplex_generated_dataplex_v1_dataplex_service_create_environment_sync.py", + "regionTag": "dataplex_generated_dataplex_v1_DataplexService_CreateEnvironment_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 43, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 44, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "DataplexService" + }, + "shortName": "CreateLake" + } + }, + "file": "dataplex_generated_dataplex_v1_dataplex_service_create_lake_async.py", + "regionTag": "dataplex_generated_dataplex_v1_DataplexService_CreateLake_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 39, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 46, + "start": 40, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "start": 47, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "DataplexService" + }, + "shortName": "CreateLake" + } + }, + "file": "dataplex_generated_dataplex_v1_dataplex_service_create_lake_sync.py", + "regionTag": "dataplex_generated_dataplex_v1_DataplexService_CreateLake_sync", + "segments": [ + { + "end": 49, + 
"start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 39, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 46, + "start": 40, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "start": 47, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "DataplexService" + }, + "shortName": "CreateTask" + } + }, + "file": "dataplex_generated_dataplex_v1_dataplex_service_create_task_async.py", + "regionTag": "dataplex_generated_dataplex_v1_DataplexService_CreateTask_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "DataplexService" + }, + "shortName": "CreateTask" + } + }, + "file": "dataplex_generated_dataplex_v1_dataplex_service_create_task_sync.py", + "regionTag": "dataplex_generated_dataplex_v1_DataplexService_CreateTask_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "DataplexService" + }, + "shortName": "CreateZone" + } + }, + "file": "dataplex_generated_dataplex_v1_dataplex_service_create_zone_async.py", + "regionTag": "dataplex_generated_dataplex_v1_DataplexService_CreateZone_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "DataplexService" + }, + "shortName": "CreateZone" + } + }, + "file": "dataplex_generated_dataplex_v1_dataplex_service_create_zone_sync.py", + "regionTag": "dataplex_generated_dataplex_v1_DataplexService_CreateZone_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "DataplexService" + }, + "shortName": "DeleteAsset" + } + }, + "file": "dataplex_generated_dataplex_v1_dataplex_service_delete_asset_async.py", + "regionTag": "dataplex_generated_dataplex_v1_DataplexService_DeleteAsset_async", + "segments": [ + { + "end": 
48, + "start": 27, + "type": "FULL" + }, + { + "end": 48, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 45, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 49, + "start": 46, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "DataplexService" + }, + "shortName": "DeleteAsset" + } + }, + "file": "dataplex_generated_dataplex_v1_dataplex_service_delete_asset_sync.py", + "regionTag": "dataplex_generated_dataplex_v1_DataplexService_DeleteAsset_sync", + "segments": [ + { + "end": 48, + "start": 27, + "type": "FULL" + }, + { + "end": 48, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 45, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 49, + "start": 46, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "DataplexService" + }, + "shortName": "DeleteEnvironment" + } + }, + "file": "dataplex_generated_dataplex_v1_dataplex_service_delete_environment_async.py", + "regionTag": "dataplex_generated_dataplex_v1_DataplexService_DeleteEnvironment_async", + "segments": [ + { + "end": 48, + "start": 27, + "type": "FULL" + }, + { + "end": 48, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 45, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 49, + "start": 46, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "DataplexService" + }, + "shortName": "DeleteEnvironment" + } + }, + "file": "dataplex_generated_dataplex_v1_dataplex_service_delete_environment_sync.py", + "regionTag": "dataplex_generated_dataplex_v1_DataplexService_DeleteEnvironment_sync", + "segments": [ + { + "end": 48, + "start": 27, + "type": "FULL" + }, + { + "end": 48, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 45, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 49, + "start": 46, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "DataplexService" + }, + "shortName": "DeleteLake" + } + }, + "file": "dataplex_generated_dataplex_v1_dataplex_service_delete_lake_async.py", + "regionTag": "dataplex_generated_dataplex_v1_DataplexService_DeleteLake_async", + "segments": [ + { + "end": 48, + "start": 27, + "type": "FULL" + }, + { + "end": 48, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 45, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 49, + "start": 46, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "DataplexService" + }, + "shortName": "DeleteLake" + } + }, + "file": "dataplex_generated_dataplex_v1_dataplex_service_delete_lake_sync.py", + "regionTag": "dataplex_generated_dataplex_v1_DataplexService_DeleteLake_sync", 
+ "segments": [ + { + "end": 48, + "start": 27, + "type": "FULL" + }, + { + "end": 48, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 45, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 49, + "start": 46, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "DataplexService" + }, + "shortName": "DeleteTask" + } + }, + "file": "dataplex_generated_dataplex_v1_dataplex_service_delete_task_async.py", + "regionTag": "dataplex_generated_dataplex_v1_DataplexService_DeleteTask_async", + "segments": [ + { + "end": 48, + "start": 27, + "type": "FULL" + }, + { + "end": 48, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 45, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 49, + "start": 46, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "DataplexService" + }, + "shortName": "DeleteTask" + } + }, + "file": "dataplex_generated_dataplex_v1_dataplex_service_delete_task_sync.py", + "regionTag": "dataplex_generated_dataplex_v1_DataplexService_DeleteTask_sync", + "segments": [ + { + "end": 48, + "start": 27, + "type": "FULL" + }, + { + "end": 48, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 45, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 49, + "start": 46, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "DataplexService" + }, + "shortName": "DeleteZone" + } + }, + "file": "dataplex_generated_dataplex_v1_dataplex_service_delete_zone_async.py", + "regionTag": "dataplex_generated_dataplex_v1_DataplexService_DeleteZone_async", + "segments": [ + { + "end": 48, + "start": 27, + "type": "FULL" + }, + { + "end": 48, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 45, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 49, + "start": 46, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "DataplexService" + }, + "shortName": "DeleteZone" + } + }, + "file": "dataplex_generated_dataplex_v1_dataplex_service_delete_zone_sync.py", + "regionTag": "dataplex_generated_dataplex_v1_DataplexService_DeleteZone_sync", + "segments": [ + { + "end": 48, + "start": 27, + "type": "FULL" + }, + { + "end": 48, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 45, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 49, + "start": 46, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "DataplexService" + }, + "shortName": "GetAsset" + } + }, + "file": "dataplex_generated_dataplex_v1_dataplex_service_get_asset_async.py", + "regionTag": "dataplex_generated_dataplex_v1_DataplexService_GetAsset_async", + 
"segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "DataplexService" + }, + "shortName": "GetAsset" + } + }, + "file": "dataplex_generated_dataplex_v1_dataplex_service_get_asset_sync.py", + "regionTag": "dataplex_generated_dataplex_v1_DataplexService_GetAsset_sync", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "DataplexService" + }, + "shortName": "GetEnvironment" + } + }, + "file": "dataplex_generated_dataplex_v1_dataplex_service_get_environment_async.py", + "regionTag": "dataplex_generated_dataplex_v1_DataplexService_GetEnvironment_async", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "DataplexService" + }, + "shortName": "GetEnvironment" + } + }, + "file": "dataplex_generated_dataplex_v1_dataplex_service_get_environment_sync.py", + "regionTag": "dataplex_generated_dataplex_v1_DataplexService_GetEnvironment_sync", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "DataplexService" + }, + "shortName": "GetJob" + } + }, + "file": "dataplex_generated_dataplex_v1_dataplex_service_get_job_async.py", + "regionTag": "dataplex_generated_dataplex_v1_DataplexService_GetJob_async", "segments": [ { - "end": 56, + "end": 44, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 44, "start": 27, "type": "SHORT" }, @@ -292,18 +1623,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 38, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 47, + "end": 41, + "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 45, + "start": 42, "type": "RESPONSE_HANDLING" } ] @@ -314,19 +1645,19 @@ "service": { "shortName": "DataplexService" }, - "shortName": "CreateTask" + "shortName": "GetJob" } }, - "file": "dataplex_generated_dataplex_v1_dataplex_service_create_task_sync.py", - "regionTag": 
"dataplex_generated_dataplex_v1_DataplexService_CreateTask_sync", + "file": "dataplex_generated_dataplex_v1_dataplex_service_get_job_sync.py", + "regionTag": "dataplex_generated_dataplex_v1_DataplexService_GetJob_sync", "segments": [ { - "end": 56, + "end": 44, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 44, "start": 27, "type": "SHORT" }, @@ -336,18 +1667,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 38, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 47, + "end": 41, + "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 45, + "start": 42, "type": "RESPONSE_HANDLING" } ] @@ -359,19 +1690,19 @@ "service": { "shortName": "DataplexService" }, - "shortName": "CreateZone" + "shortName": "GetLake" } }, - "file": "dataplex_generated_dataplex_v1_dataplex_service_create_zone_async.py", - "regionTag": "dataplex_generated_dataplex_v1_DataplexService_CreateZone_async", + "file": "dataplex_generated_dataplex_v1_dataplex_service_get_lake_async.py", + "regionTag": "dataplex_generated_dataplex_v1_DataplexService_GetLake_async", "segments": [ { - "end": 54, + "end": 44, "start": 27, "type": "FULL" }, { - "end": 54, + "end": 44, "start": 27, "type": "SHORT" }, @@ -381,18 +1712,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 38, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "end": 51, - "start": 45, + "end": 41, + "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 55, - "start": 52, + "end": 45, + "start": 42, "type": "RESPONSE_HANDLING" } ] @@ -403,19 +1734,19 @@ "service": { "shortName": "DataplexService" }, - "shortName": "CreateZone" + "shortName": "GetLake" } }, - "file": "dataplex_generated_dataplex_v1_dataplex_service_create_zone_sync.py", - "regionTag": "dataplex_generated_dataplex_v1_DataplexService_CreateZone_sync", + "file": "dataplex_generated_dataplex_v1_dataplex_service_get_lake_sync.py", + "regionTag": "dataplex_generated_dataplex_v1_DataplexService_GetLake_sync", "segments": [ { - "end": 54, + "end": 44, "start": 27, "type": "FULL" }, { - "end": 54, + "end": 44, "start": 27, "type": "SHORT" }, @@ -425,18 +1756,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 38, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "end": 51, - "start": 45, + "end": 41, + "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 55, - "start": 52, + "end": 45, + "start": 42, "type": "RESPONSE_HANDLING" } ] @@ -448,19 +1779,19 @@ "service": { "shortName": "DataplexService" }, - "shortName": "DeleteAsset" + "shortName": "GetTask" } }, - "file": "dataplex_generated_dataplex_v1_dataplex_service_delete_asset_async.py", - "regionTag": "dataplex_generated_dataplex_v1_DataplexService_DeleteAsset_async", + "file": "dataplex_generated_dataplex_v1_dataplex_service_get_task_async.py", + "regionTag": "dataplex_generated_dataplex_v1_DataplexService_GetTask_async", "segments": [ { - "end": 48, + "end": 44, "start": 27, "type": "FULL" }, { - "end": 48, + "end": 44, "start": 27, "type": "SHORT" }, @@ -475,13 +1806,13 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 45, + "end": 41, "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 49, - "start": 46, + "end": 45, + "start": 42, "type": "RESPONSE_HANDLING" } ] @@ -492,19 +1823,19 @@ "service": { "shortName": "DataplexService" }, - "shortName": "DeleteAsset" + "shortName": "GetTask" } }, - "file": "dataplex_generated_dataplex_v1_dataplex_service_delete_asset_sync.py", - "regionTag": 
"dataplex_generated_dataplex_v1_DataplexService_DeleteAsset_sync", + "file": "dataplex_generated_dataplex_v1_dataplex_service_get_task_sync.py", + "regionTag": "dataplex_generated_dataplex_v1_DataplexService_GetTask_sync", "segments": [ { - "end": 48, + "end": 44, "start": 27, "type": "FULL" }, { - "end": 48, + "end": 44, "start": 27, "type": "SHORT" }, @@ -519,13 +1850,13 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 45, + "end": 41, "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 49, - "start": 46, + "end": 45, + "start": 42, "type": "RESPONSE_HANDLING" } ] @@ -537,19 +1868,19 @@ "service": { "shortName": "DataplexService" }, - "shortName": "DeleteLake" + "shortName": "GetZone" } }, - "file": "dataplex_generated_dataplex_v1_dataplex_service_delete_lake_async.py", - "regionTag": "dataplex_generated_dataplex_v1_DataplexService_DeleteLake_async", + "file": "dataplex_generated_dataplex_v1_dataplex_service_get_zone_async.py", + "regionTag": "dataplex_generated_dataplex_v1_DataplexService_GetZone_async", "segments": [ { - "end": 48, + "end": 44, "start": 27, "type": "FULL" }, { - "end": 48, + "end": 44, "start": 27, "type": "SHORT" }, @@ -564,13 +1895,13 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 45, + "end": 41, "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 49, - "start": 46, + "end": 45, + "start": 42, "type": "RESPONSE_HANDLING" } ] @@ -581,19 +1912,19 @@ "service": { "shortName": "DataplexService" }, - "shortName": "DeleteLake" + "shortName": "GetZone" } }, - "file": "dataplex_generated_dataplex_v1_dataplex_service_delete_lake_sync.py", - "regionTag": "dataplex_generated_dataplex_v1_DataplexService_DeleteLake_sync", + "file": "dataplex_generated_dataplex_v1_dataplex_service_get_zone_sync.py", + "regionTag": "dataplex_generated_dataplex_v1_DataplexService_GetZone_sync", "segments": [ { - "end": 48, + "end": 44, "start": 27, "type": "FULL" }, { - "end": 48, + "end": 44, "start": 27, "type": "SHORT" }, @@ -608,13 +1939,13 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 45, + "end": 41, "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 49, - "start": 46, + "end": 45, + "start": 42, "type": "RESPONSE_HANDLING" } ] @@ -626,19 +1957,19 @@ "service": { "shortName": "DataplexService" }, - "shortName": "DeleteTask" + "shortName": "ListAssetActions" } }, - "file": "dataplex_generated_dataplex_v1_dataplex_service_delete_task_async.py", - "regionTag": "dataplex_generated_dataplex_v1_DataplexService_DeleteTask_async", + "file": "dataplex_generated_dataplex_v1_dataplex_service_list_asset_actions_async.py", + "regionTag": "dataplex_generated_dataplex_v1_DataplexService_ListAssetActions_async", "segments": [ { - "end": 48, + "end": 45, "start": 27, "type": "FULL" }, { - "end": 48, + "end": 45, "start": 27, "type": "SHORT" }, @@ -653,13 +1984,13 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 45, + "end": 41, "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 49, - "start": 46, + "end": 46, + "start": 42, "type": "RESPONSE_HANDLING" } ] @@ -670,19 +2001,19 @@ "service": { "shortName": "DataplexService" }, - "shortName": "DeleteTask" + "shortName": "ListAssetActions" } }, - "file": "dataplex_generated_dataplex_v1_dataplex_service_delete_task_sync.py", - "regionTag": "dataplex_generated_dataplex_v1_DataplexService_DeleteTask_sync", + "file": "dataplex_generated_dataplex_v1_dataplex_service_list_asset_actions_sync.py", + "regionTag": "dataplex_generated_dataplex_v1_DataplexService_ListAssetActions_sync", "segments": [ { - "end": 48, + "end": 45, "start": 27, "type": 
"FULL" }, { - "end": 48, + "end": 45, "start": 27, "type": "SHORT" }, @@ -697,13 +2028,13 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 45, + "end": 41, "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 49, - "start": 46, + "end": 46, + "start": 42, "type": "RESPONSE_HANDLING" } ] @@ -715,19 +2046,19 @@ "service": { "shortName": "DataplexService" }, - "shortName": "DeleteZone" + "shortName": "ListAssets" } }, - "file": "dataplex_generated_dataplex_v1_dataplex_service_delete_zone_async.py", - "regionTag": "dataplex_generated_dataplex_v1_DataplexService_DeleteZone_async", + "file": "dataplex_generated_dataplex_v1_dataplex_service_list_assets_async.py", + "regionTag": "dataplex_generated_dataplex_v1_DataplexService_ListAssets_async", "segments": [ { - "end": 48, + "end": 45, "start": 27, "type": "FULL" }, { - "end": 48, + "end": 45, "start": 27, "type": "SHORT" }, @@ -742,13 +2073,13 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 45, + "end": 41, "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 49, - "start": 46, + "end": 46, + "start": 42, "type": "RESPONSE_HANDLING" } ] @@ -759,19 +2090,19 @@ "service": { "shortName": "DataplexService" }, - "shortName": "DeleteZone" + "shortName": "ListAssets" } }, - "file": "dataplex_generated_dataplex_v1_dataplex_service_delete_zone_sync.py", - "regionTag": "dataplex_generated_dataplex_v1_DataplexService_DeleteZone_sync", + "file": "dataplex_generated_dataplex_v1_dataplex_service_list_assets_sync.py", + "regionTag": "dataplex_generated_dataplex_v1_DataplexService_ListAssets_sync", "segments": [ { - "end": 48, + "end": 45, "start": 27, "type": "FULL" }, { - "end": 48, + "end": 45, "start": 27, "type": "SHORT" }, @@ -786,13 +2117,13 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 45, + "end": 41, "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 49, - "start": 46, + "end": 46, + "start": 42, "type": "RESPONSE_HANDLING" } ] @@ -804,19 +2135,19 @@ "service": { "shortName": "DataplexService" }, - "shortName": "GetAsset" + "shortName": "ListEnvironments" } }, - "file": "dataplex_generated_dataplex_v1_dataplex_service_get_asset_async.py", - "regionTag": "dataplex_generated_dataplex_v1_DataplexService_GetAsset_async", + "file": "dataplex_generated_dataplex_v1_dataplex_service_list_environments_async.py", + "regionTag": "dataplex_generated_dataplex_v1_DataplexService_ListEnvironments_async", "segments": [ { - "end": 44, + "end": 45, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 45, "start": 27, "type": "SHORT" }, @@ -836,7 +2167,7 @@ "type": "REQUEST_EXECUTION" }, { - "end": 45, + "end": 46, "start": 42, "type": "RESPONSE_HANDLING" } @@ -848,19 +2179,19 @@ "service": { "shortName": "DataplexService" }, - "shortName": "GetAsset" + "shortName": "ListEnvironments" } }, - "file": "dataplex_generated_dataplex_v1_dataplex_service_get_asset_sync.py", - "regionTag": "dataplex_generated_dataplex_v1_DataplexService_GetAsset_sync", + "file": "dataplex_generated_dataplex_v1_dataplex_service_list_environments_sync.py", + "regionTag": "dataplex_generated_dataplex_v1_DataplexService_ListEnvironments_sync", "segments": [ { - "end": 44, + "end": 45, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 45, "start": 27, "type": "SHORT" }, @@ -880,7 +2211,7 @@ "type": "REQUEST_EXECUTION" }, { - "end": 45, + "end": 46, "start": 42, "type": "RESPONSE_HANDLING" } @@ -893,19 +2224,19 @@ "service": { "shortName": "DataplexService" }, - "shortName": "GetJob" + "shortName": "ListJobs" } }, - "file": 
"dataplex_generated_dataplex_v1_dataplex_service_get_job_async.py", - "regionTag": "dataplex_generated_dataplex_v1_DataplexService_GetJob_async", + "file": "dataplex_generated_dataplex_v1_dataplex_service_list_jobs_async.py", + "regionTag": "dataplex_generated_dataplex_v1_DataplexService_ListJobs_async", "segments": [ { - "end": 44, + "end": 45, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 45, "start": 27, "type": "SHORT" }, @@ -925,7 +2256,7 @@ "type": "REQUEST_EXECUTION" }, { - "end": 45, + "end": 46, "start": 42, "type": "RESPONSE_HANDLING" } @@ -937,19 +2268,19 @@ "service": { "shortName": "DataplexService" }, - "shortName": "GetJob" + "shortName": "ListJobs" } }, - "file": "dataplex_generated_dataplex_v1_dataplex_service_get_job_sync.py", - "regionTag": "dataplex_generated_dataplex_v1_DataplexService_GetJob_sync", + "file": "dataplex_generated_dataplex_v1_dataplex_service_list_jobs_sync.py", + "regionTag": "dataplex_generated_dataplex_v1_DataplexService_ListJobs_sync", "segments": [ { - "end": 44, + "end": 45, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 45, "start": 27, "type": "SHORT" }, @@ -969,7 +2300,7 @@ "type": "REQUEST_EXECUTION" }, { - "end": 45, + "end": 46, "start": 42, "type": "RESPONSE_HANDLING" } @@ -982,19 +2313,19 @@ "service": { "shortName": "DataplexService" }, - "shortName": "GetLake" + "shortName": "ListLakeActions" } }, - "file": "dataplex_generated_dataplex_v1_dataplex_service_get_lake_async.py", - "regionTag": "dataplex_generated_dataplex_v1_DataplexService_GetLake_async", + "file": "dataplex_generated_dataplex_v1_dataplex_service_list_lake_actions_async.py", + "regionTag": "dataplex_generated_dataplex_v1_DataplexService_ListLakeActions_async", "segments": [ { - "end": 44, + "end": 45, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 45, "start": 27, "type": "SHORT" }, @@ -1014,7 +2345,7 @@ "type": "REQUEST_EXECUTION" }, { - "end": 45, + "end": 46, "start": 42, "type": "RESPONSE_HANDLING" } @@ -1026,19 +2357,19 @@ "service": { "shortName": "DataplexService" }, - "shortName": "GetLake" + "shortName": "ListLakeActions" } }, - "file": "dataplex_generated_dataplex_v1_dataplex_service_get_lake_sync.py", - "regionTag": "dataplex_generated_dataplex_v1_DataplexService_GetLake_sync", + "file": "dataplex_generated_dataplex_v1_dataplex_service_list_lake_actions_sync.py", + "regionTag": "dataplex_generated_dataplex_v1_DataplexService_ListLakeActions_sync", "segments": [ { - "end": 44, + "end": 45, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 45, "start": 27, "type": "SHORT" }, @@ -1058,7 +2389,7 @@ "type": "REQUEST_EXECUTION" }, { - "end": 45, + "end": 46, "start": 42, "type": "RESPONSE_HANDLING" } @@ -1071,19 +2402,19 @@ "service": { "shortName": "DataplexService" }, - "shortName": "GetTask" + "shortName": "ListLakes" } }, - "file": "dataplex_generated_dataplex_v1_dataplex_service_get_task_async.py", - "regionTag": "dataplex_generated_dataplex_v1_DataplexService_GetTask_async", + "file": "dataplex_generated_dataplex_v1_dataplex_service_list_lakes_async.py", + "regionTag": "dataplex_generated_dataplex_v1_DataplexService_ListLakes_async", "segments": [ { - "end": 44, + "end": 45, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 45, "start": 27, "type": "SHORT" }, @@ -1103,7 +2434,7 @@ "type": "REQUEST_EXECUTION" }, { - "end": 45, + "end": 46, "start": 42, "type": "RESPONSE_HANDLING" } @@ -1115,19 +2446,19 @@ "service": { "shortName": "DataplexService" }, - "shortName": "GetTask" + "shortName": "ListLakes" } }, - "file": 
"dataplex_generated_dataplex_v1_dataplex_service_get_task_sync.py", - "regionTag": "dataplex_generated_dataplex_v1_DataplexService_GetTask_sync", + "file": "dataplex_generated_dataplex_v1_dataplex_service_list_lakes_sync.py", + "regionTag": "dataplex_generated_dataplex_v1_DataplexService_ListLakes_sync", "segments": [ { - "end": 44, + "end": 45, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 45, "start": 27, "type": "SHORT" }, @@ -1147,7 +2478,7 @@ "type": "REQUEST_EXECUTION" }, { - "end": 45, + "end": 46, "start": 42, "type": "RESPONSE_HANDLING" } @@ -1160,19 +2491,19 @@ "service": { "shortName": "DataplexService" }, - "shortName": "GetZone" + "shortName": "ListSessions" } }, - "file": "dataplex_generated_dataplex_v1_dataplex_service_get_zone_async.py", - "regionTag": "dataplex_generated_dataplex_v1_DataplexService_GetZone_async", + "file": "dataplex_generated_dataplex_v1_dataplex_service_list_sessions_async.py", + "regionTag": "dataplex_generated_dataplex_v1_DataplexService_ListSessions_async", "segments": [ { - "end": 44, + "end": 45, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 45, "start": 27, "type": "SHORT" }, @@ -1192,7 +2523,7 @@ "type": "REQUEST_EXECUTION" }, { - "end": 45, + "end": 46, "start": 42, "type": "RESPONSE_HANDLING" } @@ -1204,19 +2535,19 @@ "service": { "shortName": "DataplexService" }, - "shortName": "GetZone" + "shortName": "ListSessions" } }, - "file": "dataplex_generated_dataplex_v1_dataplex_service_get_zone_sync.py", - "regionTag": "dataplex_generated_dataplex_v1_DataplexService_GetZone_sync", + "file": "dataplex_generated_dataplex_v1_dataplex_service_list_sessions_sync.py", + "regionTag": "dataplex_generated_dataplex_v1_DataplexService_ListSessions_sync", "segments": [ { - "end": 44, + "end": 45, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 45, "start": 27, "type": "SHORT" }, @@ -1236,7 +2567,7 @@ "type": "REQUEST_EXECUTION" }, { - "end": 45, + "end": 46, "start": 42, "type": "RESPONSE_HANDLING" } @@ -1249,11 +2580,11 @@ "service": { "shortName": "DataplexService" }, - "shortName": "ListAssetActions" + "shortName": "ListTasks" } }, - "file": "dataplex_generated_dataplex_v1_dataplex_service_list_asset_actions_async.py", - "regionTag": "dataplex_generated_dataplex_v1_DataplexService_ListAssetActions_async", + "file": "dataplex_generated_dataplex_v1_dataplex_service_list_tasks_async.py", + "regionTag": "dataplex_generated_dataplex_v1_DataplexService_ListTasks_async", "segments": [ { "end": 45, @@ -1293,11 +2624,11 @@ "service": { "shortName": "DataplexService" }, - "shortName": "ListAssetActions" + "shortName": "ListTasks" } }, - "file": "dataplex_generated_dataplex_v1_dataplex_service_list_asset_actions_sync.py", - "regionTag": "dataplex_generated_dataplex_v1_DataplexService_ListAssetActions_sync", + "file": "dataplex_generated_dataplex_v1_dataplex_service_list_tasks_sync.py", + "regionTag": "dataplex_generated_dataplex_v1_DataplexService_ListTasks_sync", "segments": [ { "end": 45, @@ -1338,11 +2669,11 @@ "service": { "shortName": "DataplexService" }, - "shortName": "ListAssets" + "shortName": "ListZoneActions" } }, - "file": "dataplex_generated_dataplex_v1_dataplex_service_list_assets_async.py", - "regionTag": "dataplex_generated_dataplex_v1_DataplexService_ListAssets_async", + "file": "dataplex_generated_dataplex_v1_dataplex_service_list_zone_actions_async.py", + "regionTag": "dataplex_generated_dataplex_v1_DataplexService_ListZoneActions_async", "segments": [ { "end": 45, @@ -1382,11 +2713,11 @@ "service": { "shortName": 
"DataplexService" }, - "shortName": "ListAssets" + "shortName": "ListZoneActions" } }, - "file": "dataplex_generated_dataplex_v1_dataplex_service_list_assets_sync.py", - "regionTag": "dataplex_generated_dataplex_v1_DataplexService_ListAssets_sync", + "file": "dataplex_generated_dataplex_v1_dataplex_service_list_zone_actions_sync.py", + "regionTag": "dataplex_generated_dataplex_v1_DataplexService_ListZoneActions_sync", "segments": [ { "end": 45, @@ -1427,11 +2758,11 @@ "service": { "shortName": "DataplexService" }, - "shortName": "ListJobs" + "shortName": "ListZones" } }, - "file": "dataplex_generated_dataplex_v1_dataplex_service_list_jobs_async.py", - "regionTag": "dataplex_generated_dataplex_v1_DataplexService_ListJobs_async", + "file": "dataplex_generated_dataplex_v1_dataplex_service_list_zones_async.py", + "regionTag": "dataplex_generated_dataplex_v1_DataplexService_ListZones_async", "segments": [ { "end": 45, @@ -1471,11 +2802,11 @@ "service": { "shortName": "DataplexService" }, - "shortName": "ListJobs" + "shortName": "ListZones" } }, - "file": "dataplex_generated_dataplex_v1_dataplex_service_list_jobs_sync.py", - "regionTag": "dataplex_generated_dataplex_v1_DataplexService_ListJobs_sync", + "file": "dataplex_generated_dataplex_v1_dataplex_service_list_zones_sync.py", + "regionTag": "dataplex_generated_dataplex_v1_DataplexService_ListZones_sync", "segments": [ { "end": 45, @@ -1516,19 +2847,19 @@ "service": { "shortName": "DataplexService" }, - "shortName": "ListLakeActions" + "shortName": "UpdateAsset" } }, - "file": "dataplex_generated_dataplex_v1_dataplex_service_list_lake_actions_async.py", - "regionTag": "dataplex_generated_dataplex_v1_DataplexService_ListLakeActions_async", + "file": "dataplex_generated_dataplex_v1_dataplex_service_update_asset_async.py", + "regionTag": "dataplex_generated_dataplex_v1_DataplexService_UpdateAsset_async", "segments": [ { - "end": 45, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 45, + "end": 51, "start": 27, "type": "SHORT" }, @@ -1538,18 +2869,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 38, + "end": 41, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 42, "type": "REQUEST_EXECUTION" }, { - "end": 46, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ] @@ -1560,19 +2891,19 @@ "service": { "shortName": "DataplexService" }, - "shortName": "ListLakeActions" + "shortName": "UpdateAsset" } }, - "file": "dataplex_generated_dataplex_v1_dataplex_service_list_lake_actions_sync.py", - "regionTag": "dataplex_generated_dataplex_v1_DataplexService_ListLakeActions_sync", + "file": "dataplex_generated_dataplex_v1_dataplex_service_update_asset_sync.py", + "regionTag": "dataplex_generated_dataplex_v1_DataplexService_UpdateAsset_sync", "segments": [ { - "end": 45, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 45, + "end": 51, "start": 27, "type": "SHORT" }, @@ -1582,18 +2913,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 38, + "end": 41, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 42, "type": "REQUEST_EXECUTION" }, { - "end": 46, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ] @@ -1605,19 +2936,19 @@ "service": { "shortName": "DataplexService" }, - "shortName": "ListLakes" + "shortName": "UpdateEnvironment" } }, - "file": "dataplex_generated_dataplex_v1_dataplex_service_list_lakes_async.py", - "regionTag": "dataplex_generated_dataplex_v1_DataplexService_ListLakes_async", + 
"file": "dataplex_generated_dataplex_v1_dataplex_service_update_environment_async.py", + "regionTag": "dataplex_generated_dataplex_v1_DataplexService_UpdateEnvironment_async", "segments": [ { - "end": 45, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 45, + "end": 51, "start": 27, "type": "SHORT" }, @@ -1627,18 +2958,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 38, + "end": 41, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 42, "type": "REQUEST_EXECUTION" }, { - "end": 46, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ] @@ -1649,19 +2980,19 @@ "service": { "shortName": "DataplexService" }, - "shortName": "ListLakes" + "shortName": "UpdateEnvironment" } }, - "file": "dataplex_generated_dataplex_v1_dataplex_service_list_lakes_sync.py", - "regionTag": "dataplex_generated_dataplex_v1_DataplexService_ListLakes_sync", + "file": "dataplex_generated_dataplex_v1_dataplex_service_update_environment_sync.py", + "regionTag": "dataplex_generated_dataplex_v1_DataplexService_UpdateEnvironment_sync", "segments": [ { - "end": 45, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 45, + "end": 51, "start": 27, "type": "SHORT" }, @@ -1671,18 +3002,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 38, + "end": 41, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 42, "type": "REQUEST_EXECUTION" }, { - "end": 46, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ] @@ -1694,19 +3025,19 @@ "service": { "shortName": "DataplexService" }, - "shortName": "ListTasks" + "shortName": "UpdateLake" } }, - "file": "dataplex_generated_dataplex_v1_dataplex_service_list_tasks_async.py", - "regionTag": "dataplex_generated_dataplex_v1_DataplexService_ListTasks_async", + "file": "dataplex_generated_dataplex_v1_dataplex_service_update_lake_async.py", + "regionTag": "dataplex_generated_dataplex_v1_DataplexService_UpdateLake_async", "segments": [ { - "end": 45, + "end": 47, "start": 27, "type": "FULL" }, { - "end": 45, + "end": 47, "start": 27, "type": "SHORT" }, @@ -1716,18 +3047,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 38, + "end": 37, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 44, + "start": 38, "type": "REQUEST_EXECUTION" }, { - "end": 46, - "start": 42, + "end": 48, + "start": 45, "type": "RESPONSE_HANDLING" } ] @@ -1738,19 +3069,19 @@ "service": { "shortName": "DataplexService" }, - "shortName": "ListTasks" + "shortName": "UpdateLake" } }, - "file": "dataplex_generated_dataplex_v1_dataplex_service_list_tasks_sync.py", - "regionTag": "dataplex_generated_dataplex_v1_DataplexService_ListTasks_sync", + "file": "dataplex_generated_dataplex_v1_dataplex_service_update_lake_sync.py", + "regionTag": "dataplex_generated_dataplex_v1_DataplexService_UpdateLake_sync", "segments": [ { - "end": 45, + "end": 47, "start": 27, "type": "FULL" }, { - "end": 45, + "end": 47, "start": 27, "type": "SHORT" }, @@ -1760,18 +3091,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 38, + "end": 37, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 44, + "start": 38, "type": "REQUEST_EXECUTION" }, { - "end": 46, - "start": 42, + "end": 48, + "start": 45, "type": "RESPONSE_HANDLING" } ] @@ -1783,19 +3114,19 @@ "service": { "shortName": "DataplexService" }, - "shortName": "ListZoneActions" + "shortName": "UpdateTask" } }, - "file": 
"dataplex_generated_dataplex_v1_dataplex_service_list_zone_actions_async.py", - "regionTag": "dataplex_generated_dataplex_v1_DataplexService_ListZoneActions_async", + "file": "dataplex_generated_dataplex_v1_dataplex_service_update_task_async.py", + "regionTag": "dataplex_generated_dataplex_v1_DataplexService_UpdateTask_async", "segments": [ { - "end": 45, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 45, + "end": 54, "start": 27, "type": "SHORT" }, @@ -1805,18 +3136,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 38, + "end": 44, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 51, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 46, - "start": 42, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ] @@ -1827,19 +3158,19 @@ "service": { "shortName": "DataplexService" }, - "shortName": "ListZoneActions" + "shortName": "UpdateTask" } }, - "file": "dataplex_generated_dataplex_v1_dataplex_service_list_zone_actions_sync.py", - "regionTag": "dataplex_generated_dataplex_v1_DataplexService_ListZoneActions_sync", + "file": "dataplex_generated_dataplex_v1_dataplex_service_update_task_sync.py", + "regionTag": "dataplex_generated_dataplex_v1_DataplexService_UpdateTask_sync", "segments": [ { - "end": 45, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 45, + "end": 54, "start": 27, "type": "SHORT" }, @@ -1849,18 +3180,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 38, + "end": 44, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 51, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 46, - "start": 42, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ] @@ -1872,19 +3203,19 @@ "service": { "shortName": "DataplexService" }, - "shortName": "ListZones" + "shortName": "UpdateZone" } }, - "file": "dataplex_generated_dataplex_v1_dataplex_service_list_zones_async.py", - "regionTag": "dataplex_generated_dataplex_v1_DataplexService_ListZones_async", + "file": "dataplex_generated_dataplex_v1_dataplex_service_update_zone_async.py", + "regionTag": "dataplex_generated_dataplex_v1_DataplexService_UpdateZone_async", "segments": [ { - "end": 45, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 45, + "end": 52, "start": 27, "type": "SHORT" }, @@ -1894,18 +3225,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 38, + "end": 42, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 49, + "start": 43, "type": "REQUEST_EXECUTION" }, { - "end": 46, - "start": 42, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ] @@ -1916,19 +3247,19 @@ "service": { "shortName": "DataplexService" }, - "shortName": "ListZones" + "shortName": "UpdateZone" } }, - "file": "dataplex_generated_dataplex_v1_dataplex_service_list_zones_sync.py", - "regionTag": "dataplex_generated_dataplex_v1_DataplexService_ListZones_sync", + "file": "dataplex_generated_dataplex_v1_dataplex_service_update_zone_sync.py", + "regionTag": "dataplex_generated_dataplex_v1_DataplexService_UpdateZone_sync", "segments": [ { - "end": 45, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 45, + "end": 52, "start": 27, "type": "SHORT" }, @@ -1938,18 +3269,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 38, + "end": 42, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 49, + "start": 43, "type": "REQUEST_EXECUTION" }, { - "end": 46, - "start": 42, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ] @@ -1959,21 +3290,21 @@ "async": true, "method": { 
"service": { - "shortName": "DataplexService" + "shortName": "MetadataService" }, - "shortName": "UpdateAsset" + "shortName": "CreateEntity" } }, - "file": "dataplex_generated_dataplex_v1_dataplex_service_update_asset_async.py", - "regionTag": "dataplex_generated_dataplex_v1_DataplexService_UpdateAsset_async", + "file": "dataplex_generated_dataplex_v1_metadata_service_create_entity_async.py", + "regionTag": "dataplex_generated_dataplex_v1_MetadataService_CreateEntity_async", "segments": [ { - "end": 51, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 54, "start": 27, "type": "SHORT" }, @@ -1983,18 +3314,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 41, + "end": 48, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 42, + "end": 51, + "start": 49, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ] @@ -2003,21 +3334,21 @@ "clientMethod": { "method": { "service": { - "shortName": "DataplexService" + "shortName": "MetadataService" }, - "shortName": "UpdateAsset" + "shortName": "CreateEntity" } }, - "file": "dataplex_generated_dataplex_v1_dataplex_service_update_asset_sync.py", - "regionTag": "dataplex_generated_dataplex_v1_DataplexService_UpdateAsset_sync", + "file": "dataplex_generated_dataplex_v1_metadata_service_create_entity_sync.py", + "regionTag": "dataplex_generated_dataplex_v1_MetadataService_CreateEntity_sync", "segments": [ { - "end": 51, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 54, "start": 27, "type": "SHORT" }, @@ -2027,18 +3358,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 41, + "end": 48, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 42, + "end": 51, + "start": 49, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ] @@ -2048,21 +3379,21 @@ "async": true, "method": { "service": { - "shortName": "DataplexService" + "shortName": "MetadataService" }, - "shortName": "UpdateLake" + "shortName": "CreatePartition" } }, - "file": "dataplex_generated_dataplex_v1_dataplex_service_update_lake_async.py", - "regionTag": "dataplex_generated_dataplex_v1_DataplexService_UpdateLake_async", + "file": "dataplex_generated_dataplex_v1_metadata_service_create_partition_async.py", + "regionTag": "dataplex_generated_dataplex_v1_MetadataService_CreatePartition_async", "segments": [ { - "end": 47, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 47, + "end": 49, "start": 27, "type": "SHORT" }, @@ -2072,18 +3403,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 37, + "end": 43, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "end": 44, - "start": 38, + "end": 46, + "start": 44, "type": "REQUEST_EXECUTION" }, { - "end": 48, - "start": 45, + "end": 50, + "start": 47, "type": "RESPONSE_HANDLING" } ] @@ -2092,21 +3423,21 @@ "clientMethod": { "method": { "service": { - "shortName": "DataplexService" + "shortName": "MetadataService" }, - "shortName": "UpdateLake" + "shortName": "CreatePartition" } }, - "file": "dataplex_generated_dataplex_v1_dataplex_service_update_lake_sync.py", - "regionTag": "dataplex_generated_dataplex_v1_DataplexService_UpdateLake_sync", + "file": "dataplex_generated_dataplex_v1_metadata_service_create_partition_sync.py", + "regionTag": "dataplex_generated_dataplex_v1_MetadataService_CreatePartition_sync", "segments": [ { - "end": 47, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 47, + "end": 49, "start": 27, "type": 
"SHORT" }, @@ -2116,18 +3447,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 37, + "end": 43, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "end": 44, - "start": 38, + "end": 46, + "start": 44, "type": "REQUEST_EXECUTION" }, { - "end": 48, - "start": 45, + "end": 50, + "start": 47, "type": "RESPONSE_HANDLING" } ] @@ -2137,21 +3468,21 @@ "async": true, "method": { "service": { - "shortName": "DataplexService" + "shortName": "MetadataService" }, - "shortName": "UpdateTask" + "shortName": "DeleteEntity" } }, - "file": "dataplex_generated_dataplex_v1_dataplex_service_update_task_async.py", - "regionTag": "dataplex_generated_dataplex_v1_DataplexService_UpdateTask_async", + "file": "dataplex_generated_dataplex_v1_metadata_service_delete_entity_async.py", + "regionTag": "dataplex_generated_dataplex_v1_MetadataService_DeleteEntity_async", "segments": [ { - "end": 54, + "end": 43, "start": 27, "type": "FULL" }, { - "end": 54, + "end": 43, "start": 27, "type": "SHORT" }, @@ -2161,18 +3492,16 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 39, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "end": 51, - "start": 45, + "start": 40, "type": "REQUEST_EXECUTION" }, { - "end": 55, - "start": 52, + "end": 44, "type": "RESPONSE_HANDLING" } ] @@ -2181,21 +3510,21 @@ "clientMethod": { "method": { "service": { - "shortName": "DataplexService" + "shortName": "MetadataService" }, - "shortName": "UpdateTask" + "shortName": "DeleteEntity" } }, - "file": "dataplex_generated_dataplex_v1_dataplex_service_update_task_sync.py", - "regionTag": "dataplex_generated_dataplex_v1_DataplexService_UpdateTask_sync", + "file": "dataplex_generated_dataplex_v1_metadata_service_delete_entity_sync.py", + "regionTag": "dataplex_generated_dataplex_v1_MetadataService_DeleteEntity_sync", "segments": [ { - "end": 54, + "end": 43, "start": 27, "type": "FULL" }, { - "end": 54, + "end": 43, "start": 27, "type": "SHORT" }, @@ -2205,18 +3534,16 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 39, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "end": 51, - "start": 45, + "start": 40, "type": "REQUEST_EXECUTION" }, { - "end": 55, - "start": 52, + "end": 44, "type": "RESPONSE_HANDLING" } ] @@ -2226,21 +3553,21 @@ "async": true, "method": { "service": { - "shortName": "DataplexService" + "shortName": "MetadataService" }, - "shortName": "UpdateZone" + "shortName": "DeletePartition" } }, - "file": "dataplex_generated_dataplex_v1_dataplex_service_update_zone_async.py", - "regionTag": "dataplex_generated_dataplex_v1_DataplexService_UpdateZone_async", + "file": "dataplex_generated_dataplex_v1_metadata_service_delete_partition_async.py", + "regionTag": "dataplex_generated_dataplex_v1_MetadataService_DeletePartition_async", "segments": [ { - "end": 52, + "end": 42, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 42, "start": 27, "type": "SHORT" }, @@ -2250,18 +3577,16 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 42, + "end": 38, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 43, + "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 50, + "end": 43, "type": "RESPONSE_HANDLING" } ] @@ -2270,21 +3595,21 @@ "clientMethod": { "method": { "service": { - "shortName": "DataplexService" + "shortName": "MetadataService" }, - "shortName": "UpdateZone" + "shortName": "DeletePartition" } }, - "file": "dataplex_generated_dataplex_v1_dataplex_service_update_zone_sync.py", - "regionTag": "dataplex_generated_dataplex_v1_DataplexService_UpdateZone_sync", + "file": 
"dataplex_generated_dataplex_v1_metadata_service_delete_partition_sync.py", + "regionTag": "dataplex_generated_dataplex_v1_MetadataService_DeletePartition_sync", "segments": [ { - "end": 52, + "end": 42, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 42, "start": 27, "type": "SHORT" }, @@ -2294,18 +3619,16 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 42, + "end": 38, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 43, + "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 50, + "end": 43, "type": "RESPONSE_HANDLING" } ] @@ -2665,6 +3988,95 @@ "type": "RESPONSE_HANDLING" } ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "MetadataService" + }, + "shortName": "UpdateEntity" + } + }, + "file": "dataplex_generated_dataplex_v1_metadata_service_update_entity_async.py", + "regionTag": "dataplex_generated_dataplex_v1_MetadataService_UpdateEntity_async", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "MetadataService" + }, + "shortName": "UpdateEntity" + } + }, + "file": "dataplex_generated_dataplex_v1_metadata_service_update_entity_sync.py", + "regionTag": "dataplex_generated_dataplex_v1_MetadataService_UpdateEntity_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ] } ] } diff --git a/packages/google-cloud-dataplex/scripts/fixup_dataplex_v1_keywords.py b/packages/google-cloud-dataplex/scripts/fixup_dataplex_v1_keywords.py index 4496f0558f38..3c85b165cbe6 100644 --- a/packages/google-cloud-dataplex/scripts/fixup_dataplex_v1_keywords.py +++ b/packages/google-cloud-dataplex/scripts/fixup_dataplex_v1_keywords.py @@ -41,15 +41,25 @@ class dataplexCallTransformer(cst.CSTTransformer): METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { 'cancel_job': ('name', ), 'create_asset': ('parent', 'asset_id', 'asset', 'validate_only', ), + 'create_content': ('parent', 'content', 'validate_only', ), + 'create_entity': ('parent', 'entity', 'validate_only', ), + 'create_environment': ('parent', 'environment_id', 'environment', 'validate_only', ), 'create_lake': ('parent', 'lake_id', 'lake', 'validate_only', ), + 'create_partition': ('parent', 'partition', 'validate_only', ), 'create_task': ('parent', 'task_id', 'task', 'validate_only', ), 'create_zone': ('parent', 'zone_id', 'zone', 'validate_only', ), 'delete_asset': ('name', ), + 'delete_content': ('name', ), + 'delete_entity': ('name', 'etag', ), + 'delete_environment': ('name', ), 'delete_lake': ('name', ), + 'delete_partition': ('name', 'etag', ), 'delete_task': ('name', ), 'delete_zone': ('name', ), 'get_asset': ('name', ), + 'get_content': ('name', 'view', ), 'get_entity': ('name', 'view', ), + 'get_environment': ('name', ), 'get_job': ('name', ), 'get_lake': ('name', ), 'get_partition': 
('name', ), @@ -57,15 +67,21 @@ class dataplexCallTransformer(cst.CSTTransformer): 'get_zone': ('name', ), 'list_asset_actions': ('parent', 'page_size', 'page_token', ), 'list_assets': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_content': ('parent', 'page_size', 'page_token', 'filter', ), 'list_entities': ('parent', 'view', 'page_size', 'page_token', 'filter', ), + 'list_environments': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), 'list_jobs': ('parent', 'page_size', 'page_token', ), 'list_lake_actions': ('parent', 'page_size', 'page_token', ), 'list_lakes': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), 'list_partitions': ('parent', 'page_size', 'page_token', 'filter', ), + 'list_sessions': ('parent', 'page_size', 'page_token', ), 'list_tasks': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), 'list_zone_actions': ('parent', 'page_size', 'page_token', ), 'list_zones': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), 'update_asset': ('update_mask', 'asset', 'validate_only', ), + 'update_content': ('update_mask', 'content', 'validate_only', ), + 'update_entity': ('entity', 'validate_only', ), + 'update_environment': ('update_mask', 'environment', 'validate_only', ), 'update_lake': ('update_mask', 'lake', 'validate_only', ), 'update_task': ('update_mask', 'task', 'validate_only', ), 'update_zone': ('update_mask', 'zone', 'validate_only', ), diff --git a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_content_service.py b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_content_service.py new file mode 100644 index 000000000000..3db500574171 --- /dev/null +++ b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_content_service.py @@ -0,0 +1,2500 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +import mock + +import grpc +from grpc.experimental import aio +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule + + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.dataplex_v1.services.content_service import ContentServiceAsyncClient +from google.cloud.dataplex_v1.services.content_service import ContentServiceClient +from google.cloud.dataplex_v1.services.content_service import pagers +from google.cloud.dataplex_v1.services.content_service import transports +from google.cloud.dataplex_v1.types import analyze +from google.cloud.dataplex_v1.types import content +from google.cloud.dataplex_v1.types import content as gcd_content +from google.oauth2 import service_account +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +import google.auth + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert ContentServiceClient._get_default_mtls_endpoint(None) is None + assert ( + ContentServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + ContentServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + ContentServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + ContentServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + ContentServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + ) + + +@pytest.mark.parametrize( + "client_class", [ContentServiceClient, ContentServiceAsyncClient,] +) +def test_content_service_client_from_service_account_info(client_class): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == "dataplex.googleapis.com:443" + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.ContentServiceGrpcTransport, "grpc"), + (transports.ContentServiceGrpcAsyncIOTransport, "grpc_asyncio"), + ], +) +def test_content_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, 
"with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class", [ContentServiceClient, ContentServiceAsyncClient,] +) +def test_content_service_client_from_service_account_file(client_class): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json") + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json") + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == "dataplex.googleapis.com:443" + + +def test_content_service_client_get_transport_class(): + transport = ContentServiceClient.get_transport_class() + available_transports = [ + transports.ContentServiceGrpcTransport, + ] + assert transport in available_transports + + transport = ContentServiceClient.get_transport_class("grpc") + assert transport == transports.ContentServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (ContentServiceClient, transports.ContentServiceGrpcTransport, "grpc"), + ( + ContentServiceAsyncClient, + transports.ContentServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +@mock.patch.object( + ContentServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ContentServiceClient), +) +@mock.patch.object( + ContentServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ContentServiceAsyncClient), +) +def test_content_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(ContentServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(ContentServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + (ContentServiceClient, transports.ContentServiceGrpcTransport, "grpc", "true"), + ( + ContentServiceAsyncClient, + transports.ContentServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + (ContentServiceClient, transports.ContentServiceGrpcTransport, "grpc", "false"), + ( + ContentServiceAsyncClient, + transports.ContentServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ], +) +@mock.patch.object( + ContentServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ContentServiceClient), +) +@mock.patch.object( + ContentServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ContentServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_content_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + +@pytest.mark.parametrize( + "client_class", [ContentServiceClient, ContentServiceAsyncClient] +) +@mock.patch.object( + ContentServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ContentServiceClient), +) +@mock.patch.object( + ContentServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ContentServiceAsyncClient), +) +def test_content_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
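+    # An explicitly supplied api_endpoint and client_cert_source take
+    # precedence over the environment and should be returned unchanged.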
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (ContentServiceClient, transports.ContentServiceGrpcTransport, "grpc"), + ( + ContentServiceAsyncClient, + transports.ContentServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +def test_content_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions(scopes=["1", "2"],) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + ContentServiceClient, + transports.ContentServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + ContentServiceAsyncClient, + transports.ContentServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_content_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + +def test_content_service_client_client_options_from_dict(): + with mock.patch( + "google.cloud.dataplex_v1.services.content_service.transports.ContentServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = ContentServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + ContentServiceClient, + transports.ContentServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + ContentServiceAsyncClient, + transports.ContentServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_content_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # test that the credentials from file are saved and used as the credentials. 
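+    # load_credentials_from_file is mocked to return file_creds, so the
+    # channel must be built with file_creds rather than the ADC credentials
+    # returned by google.auth.default.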
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "dataplex.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="dataplex.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("request_type", [gcd_content.CreateContentRequest, dict,]) +def test_create_content(request_type, transport: str = "grpc"): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_content), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = analyze.Content( + name="name_value", + uid="uid_value", + path="path_value", + description="description_value", + data_text="data_text_value", + sql_script=analyze.Content.SqlScript( + engine=analyze.Content.SqlScript.QueryEngine.SPARK + ), + ) + response = client.create_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == gcd_content.CreateContentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, analyze.Content) + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.path == "path_value" + assert response.description == "description_value" + + +def test_create_content_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_content), "__call__") as call: + client.create_content() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gcd_content.CreateContentRequest() + + +@pytest.mark.asyncio +async def test_create_content_async( + transport: str = "grpc_asyncio", request_type=gcd_content.CreateContentRequest +): + client = ContentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
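+    # The async transport returns awaitables, so the canned response is
+    # wrapped in grpc_helpers_async.FakeUnaryUnaryCall to make it awaitable.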
+ with mock.patch.object(type(client.transport.create_content), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + analyze.Content( + name="name_value", + uid="uid_value", + path="path_value", + description="description_value", + ) + ) + response = await client.create_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == gcd_content.CreateContentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, analyze.Content) + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.path == "path_value" + assert response.description == "description_value" + + +@pytest.mark.asyncio +async def test_create_content_async_from_dict(): + await test_create_content_async(request_type=dict) + + +def test_create_content_field_headers(): + client = ContentServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gcd_content.CreateContentRequest() + + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_content), "__call__") as call: + call.return_value = analyze.Content() + client.create_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_content_field_headers_async(): + client = ContentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gcd_content.CreateContentRequest() + + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_content), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyze.Content()) + await client.create_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_create_content_flattened(): + client = ContentServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_content), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = analyze.Content() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_content( + parent="parent_value", content=analyze.Content(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
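+        # The flattened keyword arguments are folded into a single
+        # CreateContentRequest, so the assertions read each field back off
+        # args[0]. Against the real service the same call would look like
+        # this (hypothetical resource name, for illustration only):
+        #
+        #   client.create_content(
+        #       parent="projects/my-project/locations/us-central1/lakes/my-lake",
+        #       content=analyze.Content(name="name_value"),
+        #   )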
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].content + mock_val = analyze.Content(name="name_value") + assert arg == mock_val + + +def test_create_content_flattened_error(): + client = ContentServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_content( + gcd_content.CreateContentRequest(), + parent="parent_value", + content=analyze.Content(name="name_value"), + ) + + +@pytest.mark.asyncio +async def test_create_content_flattened_async(): + client = ContentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_content), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = analyze.Content() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyze.Content()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_content( + parent="parent_value", content=analyze.Content(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].content + mock_val = analyze.Content(name="name_value") + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_content_flattened_error_async(): + client = ContentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_content( + gcd_content.CreateContentRequest(), + parent="parent_value", + content=analyze.Content(name="name_value"), + ) + + +@pytest.mark.parametrize("request_type", [gcd_content.UpdateContentRequest, dict,]) +def test_update_content(request_type, transport: str = "grpc"): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_content), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = analyze.Content( + name="name_value", + uid="uid_value", + path="path_value", + description="description_value", + data_text="data_text_value", + sql_script=analyze.Content.SqlScript( + engine=analyze.Content.SqlScript.QueryEngine.SPARK + ), + ) + response = client.update_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == gcd_content.UpdateContentRequest() + + # Establish that the response is the type that we expect. 
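+    # Only name, uid, path, and description are asserted; data_text and the
+    # nested sql_script set on the mock are not checked here.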
+ assert isinstance(response, analyze.Content) + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.path == "path_value" + assert response.description == "description_value" + + +def test_update_content_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_content), "__call__") as call: + client.update_content() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gcd_content.UpdateContentRequest() + + +@pytest.mark.asyncio +async def test_update_content_async( + transport: str = "grpc_asyncio", request_type=gcd_content.UpdateContentRequest +): + client = ContentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_content), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + analyze.Content( + name="name_value", + uid="uid_value", + path="path_value", + description="description_value", + ) + ) + response = await client.update_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == gcd_content.UpdateContentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, analyze.Content) + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.path == "path_value" + assert response.description == "description_value" + + +@pytest.mark.asyncio +async def test_update_content_async_from_dict(): + await test_update_content_async(request_type=dict) + + +def test_update_content_field_headers(): + client = ContentServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gcd_content.UpdateContentRequest() + + request.content.name = "content.name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_content), "__call__") as call: + call.return_value = analyze.Content() + client.update_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "content.name=content.name/value",) in kw[ + "metadata" + ] + + +@pytest.mark.asyncio +async def test_update_content_field_headers_async(): + client = ContentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
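+    # For update RPCs the routing key is the nested field content.name, which
+    # is asserted below as "content.name=content.name/value".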
+ request = gcd_content.UpdateContentRequest() + + request.content.name = "content.name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_content), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyze.Content()) + await client.update_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "content.name=content.name/value",) in kw[ + "metadata" + ] + + +def test_update_content_flattened(): + client = ContentServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_content), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = analyze.Content() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_content( + content=analyze.Content(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].content + mock_val = analyze.Content(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_content_flattened_error(): + client = ContentServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_content( + gcd_content.UpdateContentRequest(), + content=analyze.Content(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_content_flattened_async(): + client = ContentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_content), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = analyze.Content() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyze.Content()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_content( + content=analyze.Content(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].content + mock_val = analyze.Content(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_content_flattened_error_async(): + client = ContentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_content( + gcd_content.UpdateContentRequest(), + content=analyze.Content(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize("request_type", [content.DeleteContentRequest, dict,]) +def test_delete_content(request_type, transport: str = "grpc"): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_content), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == content.DeleteContentRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_content_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_content), "__call__") as call: + client.delete_content() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == content.DeleteContentRequest() + + +@pytest.mark.asyncio +async def test_delete_content_async( + transport: str = "grpc_asyncio", request_type=content.DeleteContentRequest +): + client = ContentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_content), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == content.DeleteContentRequest() + + # Establish that the response is the type that we expect. 
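+    # DeleteContent returns google.protobuf.Empty, which the client surfaces
+    # as None.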
+ assert response is None + + +@pytest.mark.asyncio +async def test_delete_content_async_from_dict(): + await test_delete_content_async(request_type=dict) + + +def test_delete_content_field_headers(): + client = ContentServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = content.DeleteContentRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_content), "__call__") as call: + call.return_value = None + client.delete_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_content_field_headers_async(): + client = ContentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = content.DeleteContentRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_content), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_delete_content_flattened(): + client = ContentServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_content), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_content(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_content_flattened_error(): + client = ContentServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_content( + content.DeleteContentRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_content_flattened_async(): + client = ContentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_content), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_content(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_content_flattened_error_async(): + client = ContentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_content( + content.DeleteContentRequest(), name="name_value", + ) + + +@pytest.mark.parametrize("request_type", [content.GetContentRequest, dict,]) +def test_get_content(request_type, transport: str = "grpc"): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_content), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = analyze.Content( + name="name_value", + uid="uid_value", + path="path_value", + description="description_value", + data_text="data_text_value", + sql_script=analyze.Content.SqlScript( + engine=analyze.Content.SqlScript.QueryEngine.SPARK + ), + ) + response = client.get_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == content.GetContentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, analyze.Content) + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.path == "path_value" + assert response.description == "description_value" + + +def test_get_content_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_content), "__call__") as call: + client.get_content() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == content.GetContentRequest() + + +@pytest.mark.asyncio +async def test_get_content_async( + transport: str = "grpc_asyncio", request_type=content.GetContentRequest +): + client = ContentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_content), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + analyze.Content( + name="name_value", + uid="uid_value", + path="path_value", + description="description_value", + ) + ) + response = await client.get_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == content.GetContentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, analyze.Content) + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.path == "path_value" + assert response.description == "description_value" + + +@pytest.mark.asyncio +async def test_get_content_async_from_dict(): + await test_get_content_async(request_type=dict) + + +def test_get_content_field_headers(): + client = ContentServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = content.GetContentRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_content), "__call__") as call: + call.return_value = analyze.Content() + client.get_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_content_field_headers_async(): + client = ContentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = content.GetContentRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_content), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyze.Content()) + await client.get_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_get_content_flattened(): + client = ContentServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_content), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = analyze.Content() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_content(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_content_flattened_error(): + client = ContentServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_content( + content.GetContentRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_content_flattened_async(): + client = ContentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_content), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = analyze.Content() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyze.Content()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_content(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_content_flattened_error_async(): + client = ContentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_content( + content.GetContentRequest(), name="name_value", + ) + + +@pytest.mark.parametrize("request_type", [content.ListContentRequest, dict,]) +def test_list_content(request_type, transport: str = "grpc"): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_content), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = content.ListContentResponse( + next_page_token="next_page_token_value", + ) + response = client.list_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == content.ListContentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListContentPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_content_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
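+    # Calling list_content() with no arguments must still send a well-formed,
+    # empty ListContentRequest.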
+ with mock.patch.object(type(client.transport.list_content), "__call__") as call: + client.list_content() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == content.ListContentRequest() + + +@pytest.mark.asyncio +async def test_list_content_async( + transport: str = "grpc_asyncio", request_type=content.ListContentRequest +): + client = ContentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_content), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + content.ListContentResponse(next_page_token="next_page_token_value",) + ) + response = await client.list_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == content.ListContentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListContentAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_content_async_from_dict(): + await test_list_content_async(request_type=dict) + + +def test_list_content_field_headers(): + client = ContentServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = content.ListContentRequest() + + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_content), "__call__") as call: + call.return_value = content.ListContentResponse() + client.list_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_content_field_headers_async(): + client = ContentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = content.ListContentRequest() + + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_content), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + content.ListContentResponse() + ) + await client.list_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
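+    # The routing header is derived from request.parent and sent as
+    # x-goog-request-params so the backend can route the request.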
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
+
+
+def test_list_content_flattened():
+    client = ContentServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_content), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = content.ListContentResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.list_content(parent="parent_value",)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+def test_list_content_flattened_error():
+    client = ContentServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_content(
+            content.ListContentRequest(), parent="parent_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_list_content_flattened_async():
+    client = ContentServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_content), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            content.ListContentResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_content(parent="parent_value",)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_list_content_flattened_error_async():
+    client = ContentServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_content(
+            content.ListContentRequest(), parent="parent_value",
+        )
+
+
+def test_list_content_pager(transport_name: str = "grpc"):
+    client = ContentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_content), "__call__") as call:
+        # Set the response to a series of pages.
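+        # Each element of side_effect is consumed by one successive stub call;
+        # the trailing RuntimeError guards against the pager fetching more
+        # than the four pages designated here.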
+        call.side_effect = (
+            content.ListContentResponse(
+                content=[analyze.Content(), analyze.Content(), analyze.Content(),],
+                next_page_token="abc",
+            ),
+            content.ListContentResponse(content=[], next_page_token="def",),
+            content.ListContentResponse(
+                content=[analyze.Content(),], next_page_token="ghi",
+            ),
+            content.ListContentResponse(
+                content=[analyze.Content(), analyze.Content(),],
+            ),
+            RuntimeError,
+        )
+
+        metadata = ()
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
+        )
+        pager = client.list_content(request={})
+
+        assert pager._metadata == metadata
+
+        results = [i for i in pager]
+        assert len(results) == 6
+        assert all(isinstance(i, analyze.Content) for i in results)
+
+
+def test_list_content_pages(transport_name: str = "grpc"):
+    client = ContentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_content), "__call__") as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            content.ListContentResponse(
+                content=[analyze.Content(), analyze.Content(), analyze.Content(),],
+                next_page_token="abc",
+            ),
+            content.ListContentResponse(content=[], next_page_token="def",),
+            content.ListContentResponse(
+                content=[analyze.Content(),], next_page_token="ghi",
+            ),
+            content.ListContentResponse(
+                content=[analyze.Content(), analyze.Content(),],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_content(request={}).pages)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.asyncio
+async def test_list_content_async_pager():
+    client = ContentServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_content), "__call__", new_callable=mock.AsyncMock
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            content.ListContentResponse(
+                content=[analyze.Content(), analyze.Content(), analyze.Content(),],
+                next_page_token="abc",
+            ),
+            content.ListContentResponse(content=[], next_page_token="def",),
+            content.ListContentResponse(
+                content=[analyze.Content(),], next_page_token="ghi",
+            ),
+            content.ListContentResponse(
+                content=[analyze.Content(), analyze.Content(),],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_content(request={},)
+        assert async_pager.next_page_token == "abc"
+        responses = []
+        async for response in async_pager:
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(isinstance(i, analyze.Content) for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_content_async_pages():
+    client = ContentServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_content), "__call__", new_callable=mock.AsyncMock
+    ) as call:
+        # Set the response to a series of pages.
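+        # As in the sync variant, one response is served per page request; the
+        # awaited list_content call returns the pager whose .pages attribute
+        # is then iterated with `async for` below.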
+ call.side_effect = ( + content.ListContentResponse( + content=[analyze.Content(), analyze.Content(), analyze.Content(),], + next_page_token="abc", + ), + content.ListContentResponse(content=[], next_page_token="def",), + content.ListContentResponse( + content=[analyze.Content(),], next_page_token="ghi", + ), + content.ListContentResponse( + content=[analyze.Content(), analyze.Content(),], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_content(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.ContentServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.ContentServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ContentServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.ContentServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ContentServiceClient(client_options=options, transport=transport,) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ContentServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.ContentServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ContentServiceClient( + client_options={"scopes": ["1", "2"]}, transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.ContentServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = ContentServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.ContentServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.ContentServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ContentServiceGrpcTransport, + transports.ContentServiceGrpcAsyncIOTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. 
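+    # No transport argument is passed here, so the client should fall back
+    # to the synchronous gRPC transport.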
+ client = ContentServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + assert isinstance(client.transport, transports.ContentServiceGrpcTransport,) + + +def test_content_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.ContentServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_content_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.dataplex_v1.services.content_service.transports.ContentServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.ContentServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "create_content", + "update_content", + "delete_content", + "get_content", + "list_content", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + +def test_content_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.dataplex_v1.services.content_service.transports.ContentServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ContentServiceTransport( + credentials_file="credentials.json", quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_content_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.dataplex_v1.services.content_service.transports.ContentServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ContentServiceTransport() + adc.assert_called_once() + + +def test_content_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + ContentServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ContentServiceGrpcTransport, + transports.ContentServiceGrpcAsyncIOTransport, + ], +) +def test_content_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
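+    # google.auth.default is patched below, so no real Application Default
+    # Credentials lookup (env vars, gcloud config, metadata server) happens.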
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.ContentServiceGrpcTransport, grpc_helpers), + (transports.ContentServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_content_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "dataplex.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="dataplex.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ContentServiceGrpcTransport, + transports.ContentServiceGrpcAsyncIOTransport, + ], +) +def test_content_service_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
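+    # client_cert_source_callback returns a (cert, key) pair of PEM bytes,
+    # which the transport is expected to hand to grpc.ssl_channel_credentials.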
+    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
+        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
+            transport_class(
+                credentials=cred,
+                client_cert_source_for_mtls=client_cert_source_callback,
+            )
+            expected_cert, expected_key = client_cert_source_callback()
+            mock_ssl_cred.assert_called_once_with(
+                certificate_chain=expected_cert, private_key=expected_key
+            )
+
+
+def test_content_service_host_no_port():
+    client = ContentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(
+            api_endpoint="dataplex.googleapis.com"
+        ),
+    )
+    assert client.transport._host == "dataplex.googleapis.com:443"
+
+
+def test_content_service_host_with_port():
+    client = ContentServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(
+            api_endpoint="dataplex.googleapis.com:8000"
+        ),
+    )
+    assert client.transport._host == "dataplex.googleapis.com:8000"
+
+
+def test_content_service_grpc_transport_channel():
+    channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.ContentServiceGrpcTransport(
+        host="squid.clam.whelk", channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+def test_content_service_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.ContentServiceGrpcAsyncIOTransport(
+        host="squid.clam.whelk", channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
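+# Until then, these verify that the deprecated mTLS arguments still route
+# through grpc.ssl_channel_credentials and emit a DeprecationWarning.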
+@pytest.mark.parametrize( + "transport_class", + [ + transports.ContentServiceGrpcTransport, + transports.ContentServiceGrpcAsyncIOTransport, + ], +) +def test_content_service_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.ContentServiceGrpcTransport, + transports.ContentServiceGrpcAsyncIOTransport, + ], +) +def test_content_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_content_path(): + project = "squid" + location = "clam" + lake = "whelk" + content = "octopus" + expected = "projects/{project}/locations/{location}/lakes/{lake}/content/{content}".format( + project=project, location=location, lake=lake, content=content, + ) + actual = ContentServiceClient.content_path(project, location, lake, content) + assert expected == actual + + +def test_parse_content_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + "lake": "cuttlefish", + "content": "mussel", + } + path = ContentServiceClient.content_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ContentServiceClient.parse_content_path(path) + assert expected == actual + + +def test_lake_path(): + project = "winkle" + location = "nautilus" + lake = "scallop" + expected = "projects/{project}/locations/{location}/lakes/{lake}".format( + project=project, location=location, lake=lake, + ) + actual = ContentServiceClient.lake_path(project, location, lake) + assert expected == actual + + +def test_parse_lake_path(): + expected = { + "project": "abalone", + "location": "squid", + "lake": "clam", + } + path = ContentServiceClient.lake_path(**expected) + + # Check that the path construction is reversible. + actual = ContentServiceClient.parse_lake_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "whelk" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = ContentServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "octopus", + } + path = ContentServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = ContentServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "oyster" + expected = "folders/{folder}".format(folder=folder,) + actual = ContentServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nudibranch", + } + path = ContentServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = ContentServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "cuttlefish" + expected = "organizations/{organization}".format(organization=organization,) + actual = ContentServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "mussel", + } + path = ContentServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = ContentServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "winkle" + expected = "projects/{project}".format(project=project,) + actual = ContentServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nautilus", + } + path = ContentServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = ContentServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "scallop" + location = "abalone" + expected = "projects/{project}/locations/{location}".format( + project=project, location=location, + ) + actual = ContentServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "squid", + "location": "clam", + } + path = ContentServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ContentServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.ContentServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.ContentServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = ContentServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = ContentServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "grpc", + ] + for transport in transports: + client = ContentServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
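+        # Exiting the client context manager should delegate to
+        # transport.close(), mirroring test_transport_close above.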
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (ContentServiceClient, transports.ContentServiceGrpcTransport), + (ContentServiceAsyncClient, transports.ContentServiceGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) diff --git a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_dataplex_service.py b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_dataplex_service.py index 268ecaec35a2..c4a4736f8d03 100644 --- a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_dataplex_service.py +++ b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_dataplex_service.py @@ -41,6 +41,7 @@ from google.cloud.dataplex_v1.services.dataplex_service import DataplexServiceClient from google.cloud.dataplex_v1.services.dataplex_service import pagers from google.cloud.dataplex_v1.services.dataplex_service import transports +from google.cloud.dataplex_v1.types import analyze from google.cloud.dataplex_v1.types import resources from google.cloud.dataplex_v1.types import service from google.cloud.dataplex_v1.types import tasks @@ -7188,6 +7189,1612 @@ async def test_cancel_job_flattened_error_async(): ) +@pytest.mark.parametrize("request_type", [service.CreateEnvironmentRequest, dict,]) +def test_create_environment(request_type, transport: str = "grpc"): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_environment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_environment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.CreateEnvironmentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_environment_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_environment), "__call__" + ) as call: + client.create_environment() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.CreateEnvironmentRequest() + + +@pytest.mark.asyncio +async def test_create_environment_async( + transport: str = "grpc_asyncio", request_type=service.CreateEnvironmentRequest +): + client = DataplexServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_environment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_environment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.CreateEnvironmentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_environment_async_from_dict(): + await test_create_environment_async(request_type=dict) + + +def test_create_environment_field_headers(): + client = DataplexServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.CreateEnvironmentRequest() + + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_environment), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_environment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_environment_field_headers_async(): + client = DataplexServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.CreateEnvironmentRequest() + + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_environment), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_environment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
+
+
+def test_create_environment_flattened():
+    client = DataplexServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.create_environment), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name="operations/op")
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.create_environment(
+            parent="parent_value",
+            environment=analyze.Environment(name="name_value"),
+            environment_id="environment_id_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+        arg = args[0].environment
+        mock_val = analyze.Environment(name="name_value")
+        assert arg == mock_val
+        arg = args[0].environment_id
+        mock_val = "environment_id_value"
+        assert arg == mock_val
+
+
+def test_create_environment_flattened_error():
+    client = DataplexServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.create_environment(
+            service.CreateEnvironmentRequest(),
+            parent="parent_value",
+            environment=analyze.Environment(name="name_value"),
+            environment_id="environment_id_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_create_environment_flattened_async():
+    client = DataplexServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.create_environment), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/spam")
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.create_environment(
+            parent="parent_value",
+            environment=analyze.Environment(name="name_value"),
+            environment_id="environment_id_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+        arg = args[0].environment
+        mock_val = analyze.Environment(name="name_value")
+        assert arg == mock_val
+        arg = args[0].environment_id
+        mock_val = "environment_id_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_create_environment_flattened_error_async():
+    client = DataplexServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
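+    # Flattened keyword arguments are only sugar for building the request, so
+    # combining them with an explicit request object would be ambiguous.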
+ with pytest.raises(ValueError): + await client.create_environment( + service.CreateEnvironmentRequest(), + parent="parent_value", + environment=analyze.Environment(name="name_value"), + environment_id="environment_id_value", + ) + + +@pytest.mark.parametrize("request_type", [service.UpdateEnvironmentRequest, dict,]) +def test_update_environment(request_type, transport: str = "grpc"): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_environment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_environment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.UpdateEnvironmentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_environment_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_environment), "__call__" + ) as call: + client.update_environment() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.UpdateEnvironmentRequest() + + +@pytest.mark.asyncio +async def test_update_environment_async( + transport: str = "grpc_asyncio", request_type=service.UpdateEnvironmentRequest +): + client = DataplexServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_environment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_environment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.UpdateEnvironmentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_environment_async_from_dict(): + await test_update_environment_async(request_type=dict) + + +def test_update_environment_field_headers(): + client = DataplexServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
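+    # For Update RPCs the routing key is the resource's own name, so the
+    # header carries environment.name rather than a parent path.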
+ request = service.UpdateEnvironmentRequest() + + request.environment.name = "environment.name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_environment), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_environment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "environment.name=environment.name/value",) in kw[ + "metadata" + ] + + +@pytest.mark.asyncio +async def test_update_environment_field_headers_async(): + client = DataplexServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.UpdateEnvironmentRequest() + + request.environment.name = "environment.name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_environment), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.update_environment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "environment.name=environment.name/value",) in kw[ + "metadata" + ] + + +def test_update_environment_flattened(): + client = DataplexServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_environment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_environment( + environment=analyze.Environment(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].environment + mock_val = analyze.Environment(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_environment_flattened_error(): + client = DataplexServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+    with pytest.raises(ValueError):
+        client.update_environment(
+            service.UpdateEnvironmentRequest(),
+            environment=analyze.Environment(name="name_value"),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+
+@pytest.mark.asyncio
+async def test_update_environment_flattened_async():
+    client = DataplexServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.update_environment), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/spam")
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.update_environment(
+            environment=analyze.Environment(name="name_value"),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].environment
+        mock_val = analyze.Environment(name="name_value")
+        assert arg == mock_val
+        arg = args[0].update_mask
+        mock_val = field_mask_pb2.FieldMask(paths=["paths_value"])
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_update_environment_flattened_error_async():
+    client = DataplexServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.update_environment(
+            service.UpdateEnvironmentRequest(),
+            environment=analyze.Environment(name="name_value"),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+
+@pytest.mark.parametrize("request_type", [service.DeleteEnvironmentRequest, dict,])
+def test_delete_environment(request_type, transport: str = "grpc"):
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.delete_environment), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name="operations/spam")
+        response = client.delete_environment(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == service.DeleteEnvironmentRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, future.Future)
+
+
+def test_delete_environment_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object( + type(client.transport.delete_environment), "__call__" + ) as call: + client.delete_environment() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.DeleteEnvironmentRequest() + + +@pytest.mark.asyncio +async def test_delete_environment_async( + transport: str = "grpc_asyncio", request_type=service.DeleteEnvironmentRequest +): + client = DataplexServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_environment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_environment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.DeleteEnvironmentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_environment_async_from_dict(): + await test_delete_environment_async(request_type=dict) + + +def test_delete_environment_field_headers(): + client = DataplexServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.DeleteEnvironmentRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_environment), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_environment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_environment_field_headers_async(): + client = DataplexServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.DeleteEnvironmentRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_environment), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_environment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
+
+
+def test_delete_environment_flattened():
+    client = DataplexServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.delete_environment), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name="operations/op")
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.delete_environment(name="name_value",)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+def test_delete_environment_flattened_error():
+    client = DataplexServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.delete_environment(
+            service.DeleteEnvironmentRequest(), name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_delete_environment_flattened_async():
+    client = DataplexServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.delete_environment), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/spam")
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.delete_environment(name="name_value",)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_delete_environment_flattened_error_async():
+    client = DataplexServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.delete_environment(
+            service.DeleteEnvironmentRequest(), name="name_value",
+        )
+
+
+@pytest.mark.parametrize("request_type", [service.ListEnvironmentsRequest, dict,])
+def test_list_environments(request_type, transport: str = "grpc"):
+    client = DataplexServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_environments), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+ call.return_value = service.ListEnvironmentsResponse( + next_page_token="next_page_token_value", + ) + response = client.list_environments(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListEnvironmentsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListEnvironmentsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_environments_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_environments), "__call__" + ) as call: + client.list_environments() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListEnvironmentsRequest() + + +@pytest.mark.asyncio +async def test_list_environments_async( + transport: str = "grpc_asyncio", request_type=service.ListEnvironmentsRequest +): + client = DataplexServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_environments), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListEnvironmentsResponse(next_page_token="next_page_token_value",) + ) + response = await client.list_environments(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListEnvironmentsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListEnvironmentsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_environments_async_from_dict(): + await test_list_environments_async(request_type=dict) + + +def test_list_environments_field_headers(): + client = DataplexServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.ListEnvironmentsRequest() + + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_environments), "__call__" + ) as call: + call.return_value = service.ListEnvironmentsResponse() + client.list_environments(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_list_environments_field_headers_async():
+    client = DataplexServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = service.ListEnvironmentsRequest()
+
+    request.parent = "parent/value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_environments), "__call__"
+    ) as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            service.ListEnvironmentsResponse()
+        )
+        await client.list_environments(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
+
+
+def test_list_environments_flattened():
+    client = DataplexServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_environments), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = service.ListEnvironmentsResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.list_environments(parent="parent_value",)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+def test_list_environments_flattened_error():
+    client = DataplexServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_environments(
+            service.ListEnvironmentsRequest(), parent="parent_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_list_environments_flattened_async():
+    client = DataplexServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_environments), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            service.ListEnvironmentsResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_environments(parent="parent_value",)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_list_environments_flattened_error_async():
+    client = DataplexServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + await client.list_environments( + service.ListEnvironmentsRequest(), parent="parent_value", + ) + + +def test_list_environments_pager(transport_name: str = "grpc"): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_environments), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListEnvironmentsResponse( + environments=[ + analyze.Environment(), + analyze.Environment(), + analyze.Environment(), + ], + next_page_token="abc", + ), + service.ListEnvironmentsResponse(environments=[], next_page_token="def",), + service.ListEnvironmentsResponse( + environments=[analyze.Environment(),], next_page_token="ghi", + ), + service.ListEnvironmentsResponse( + environments=[analyze.Environment(), analyze.Environment(),], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_environments(request={}) + + assert pager._metadata == metadata + + results = [i for i in pager] + assert len(results) == 6 + assert all(isinstance(i, analyze.Environment) for i in results) + + +def test_list_environments_pages(transport_name: str = "grpc"): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_environments), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListEnvironmentsResponse( + environments=[ + analyze.Environment(), + analyze.Environment(), + analyze.Environment(), + ], + next_page_token="abc", + ), + service.ListEnvironmentsResponse(environments=[], next_page_token="def",), + service.ListEnvironmentsResponse( + environments=[analyze.Environment(),], next_page_token="ghi", + ), + service.ListEnvironmentsResponse( + environments=[analyze.Environment(), analyze.Environment(),], + ), + RuntimeError, + ) + pages = list(client.list_environments(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_environments_async_pager(): + client = DataplexServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_environments), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
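+ # Each stub invocation consumes the next item from side_effect, simulating four + # pages; the trailing RuntimeError is a sentinel that fails loudly if the pager + # ever requests a page beyond the final, token-less response.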
+ call.side_effect = ( + service.ListEnvironmentsResponse( + environments=[ + analyze.Environment(), + analyze.Environment(), + analyze.Environment(), + ], + next_page_token="abc", + ), + service.ListEnvironmentsResponse(environments=[], next_page_token="def",), + service.ListEnvironmentsResponse( + environments=[analyze.Environment(),], next_page_token="ghi", + ), + service.ListEnvironmentsResponse( + environments=[analyze.Environment(), analyze.Environment(),], + ), + RuntimeError, + ) + async_pager = await client.list_environments(request={},) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, analyze.Environment) for i in responses) + + +@pytest.mark.asyncio +async def test_list_environments_async_pages(): + client = DataplexServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_environments), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListEnvironmentsResponse( + environments=[ + analyze.Environment(), + analyze.Environment(), + analyze.Environment(), + ], + next_page_token="abc", + ), + service.ListEnvironmentsResponse(environments=[], next_page_token="def",), + service.ListEnvironmentsResponse( + environments=[analyze.Environment(),], next_page_token="ghi", + ), + service.ListEnvironmentsResponse( + environments=[analyze.Environment(), analyze.Environment(),], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_environments(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [service.GetEnvironmentRequest, dict,]) +def test_get_environment(request_type, transport: str = "grpc"): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_environment), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = analyze.Environment( + name="name_value", + display_name="display_name_value", + uid="uid_value", + description="description_value", + state=resources.State.ACTIVE, + ) + response = client.get_environment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetEnvironmentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, analyze.Environment) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.uid == "uid_value" + assert response.description == "description_value" + assert response.state == resources.State.ACTIVE + + +def test_get_environment_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_environment), "__call__") as call: + client.get_environment() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetEnvironmentRequest() + + +@pytest.mark.asyncio +async def test_get_environment_async( + transport: str = "grpc_asyncio", request_type=service.GetEnvironmentRequest +): + client = DataplexServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_environment), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + analyze.Environment( + name="name_value", + display_name="display_name_value", + uid="uid_value", + description="description_value", + state=resources.State.ACTIVE, + ) + ) + response = await client.get_environment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.GetEnvironmentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, analyze.Environment) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.uid == "uid_value" + assert response.description == "description_value" + assert response.state == resources.State.ACTIVE + + +@pytest.mark.asyncio +async def test_get_environment_async_from_dict(): + await test_get_environment_async(request_type=dict) + + +def test_get_environment_field_headers(): + client = DataplexServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.GetEnvironmentRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_environment), "__call__") as call: + call.return_value = analyze.Environment() + client.get_environment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_environment_field_headers_async(): + client = DataplexServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.GetEnvironmentRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
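+ # FakeUnaryUnaryCall stands in for the grpc.aio unary-unary call object, wrapping + # the canned response so the async client can await the mocked stub's result.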
+ with mock.patch.object(type(client.transport.get_environment), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyze.Environment()) + await client.get_environment(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_get_environment_flattened(): + client = DataplexServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_environment), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = analyze.Environment() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_environment(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_environment_flattened_error(): + client = DataplexServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_environment( + service.GetEnvironmentRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_environment_flattened_async(): + client = DataplexServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_environment), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = analyze.Environment() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(analyze.Environment()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_environment(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_environment_flattened_error_async(): + client = DataplexServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_environment( + service.GetEnvironmentRequest(), name="name_value", + ) + + +@pytest.mark.parametrize("request_type", [service.ListSessionsRequest, dict,]) +def test_list_sessions(request_type, transport: str = "grpc"): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
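+ # Patching __call__ on the wrapped method's type intercepts the gRPC stub + # invocation itself, so no real channel traffic is ever attempted.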
+ with mock.patch.object(type(client.transport.list_sessions), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = service.ListSessionsResponse( + next_page_token="next_page_token_value", + ) + response = client.list_sessions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListSessionsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListSessionsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_sessions_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_sessions), "__call__") as call: + client.list_sessions() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListSessionsRequest() + + +@pytest.mark.asyncio +async def test_list_sessions_async( + transport: str = "grpc_asyncio", request_type=service.ListSessionsRequest +): + client = DataplexServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_sessions), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListSessionsResponse(next_page_token="next_page_token_value",) + ) + response = await client.list_sessions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == service.ListSessionsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListSessionsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_sessions_async_from_dict(): + await test_list_sessions_async(request_type=dict) + + +def test_list_sessions_field_headers(): + client = DataplexServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.ListSessionsRequest() + + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_sessions), "__call__") as call: + call.return_value = service.ListSessionsResponse() + client.list_sessions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_sessions_field_headers_async(): + client = DataplexServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = service.ListSessionsRequest() + + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_sessions), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListSessionsResponse() + ) + await client.list_sessions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_list_sessions_flattened(): + client = DataplexServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_sessions), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = service.ListSessionsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_sessions(parent="parent_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_sessions_flattened_error(): + client = DataplexServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_sessions( + service.ListSessionsRequest(), parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_sessions_flattened_async(): + client = DataplexServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_sessions), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = service.ListSessionsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + service.ListSessionsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_sessions(parent="parent_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_sessions_flattened_error_async(): + client = DataplexServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.list_sessions( + service.ListSessionsRequest(), parent="parent_value", + ) + + +def test_list_sessions_pager(transport_name: str = "grpc"): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_sessions), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListSessionsResponse( + sessions=[analyze.Session(), analyze.Session(), analyze.Session(),], + next_page_token="abc", + ), + service.ListSessionsResponse(sessions=[], next_page_token="def",), + service.ListSessionsResponse( + sessions=[analyze.Session(),], next_page_token="ghi", + ), + service.ListSessionsResponse( + sessions=[analyze.Session(), analyze.Session(),], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_sessions(request={}) + + assert pager._metadata == metadata + + results = [i for i in pager] + assert len(results) == 6 + assert all(isinstance(i, analyze.Session) for i in results) + + +def test_list_sessions_pages(transport_name: str = "grpc"): + client = DataplexServiceClient( + credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_sessions), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListSessionsResponse( + sessions=[analyze.Session(), analyze.Session(), analyze.Session(),], + next_page_token="abc", + ), + service.ListSessionsResponse(sessions=[], next_page_token="def",), + service.ListSessionsResponse( + sessions=[analyze.Session(),], next_page_token="ghi", + ), + service.ListSessionsResponse( + sessions=[analyze.Session(), analyze.Session(),], + ), + RuntimeError, + ) + pages = list(client.list_sessions(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_sessions_async_pager(): + client = DataplexServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_sessions), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + service.ListSessionsResponse( + sessions=[analyze.Session(), analyze.Session(), analyze.Session(),], + next_page_token="abc", + ), + service.ListSessionsResponse(sessions=[], next_page_token="def",), + service.ListSessionsResponse( + sessions=[analyze.Session(),], next_page_token="ghi", + ), + service.ListSessionsResponse( + sessions=[analyze.Session(), analyze.Session(),], + ), + RuntimeError, + ) + async_pager = await client.list_sessions(request={},) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, analyze.Session) for i in responses) + + +@pytest.mark.asyncio +async def test_list_sessions_async_pages(): + client = DataplexServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_sessions), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + service.ListSessionsResponse( + sessions=[analyze.Session(), analyze.Session(), analyze.Session(),], + next_page_token="abc", + ), + service.ListSessionsResponse(sessions=[], next_page_token="def",), + service.ListSessionsResponse( + sessions=[analyze.Session(),], next_page_token="ghi", + ), + service.ListSessionsResponse( + sessions=[analyze.Session(), analyze.Session(),], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_sessions(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.DataplexServiceGrpcTransport( @@ -7328,6 +8935,12 @@ def test_dataplex_service_base_transport(): "list_jobs", "get_job", "cancel_job", + "create_environment", + "update_environment", + "delete_environment", + "list_environments", + "get_environment", + "list_sessions", ) for method in methods: with pytest.raises(NotImplementedError): @@ -7709,12 +9322,40 @@ def test_parse_asset_path(): assert expected == actual -def test_job_path(): +def test_environment_path(): project = "cuttlefish" location = "mussel" lake = "winkle" - task = "nautilus" - job = "scallop" + environment = "nautilus" + expected = "projects/{project}/locations/{location}/lakes/{lake}/environments/{environment}".format( + project=project, location=location, lake=lake, environment=environment, + ) + actual = DataplexServiceClient.environment_path( + project, location, lake, environment + ) + assert expected == actual + + +def test_parse_environment_path(): + expected = { + "project": "scallop", + "location": "abalone", + "lake": "squid", + "environment": "clam", + } + path = DataplexServiceClient.environment_path(**expected) + + # Check that the path construction is reversible. 
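+ # environment_path and parse_environment_path are expected to be exact inverses + # over the resource-name template, so every component should survive the round trip.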
+ actual = DataplexServiceClient.parse_environment_path(path) + assert expected == actual + + +def test_job_path(): + project = "whelk" + location = "octopus" + lake = "oyster" + task = "nudibranch" + job = "cuttlefish" expected = "projects/{project}/locations/{location}/lakes/{lake}/tasks/{task}/jobs/{job}".format( project=project, location=location, lake=lake, task=task, job=job, ) @@ -7724,11 +9365,11 @@ def test_job_path(): def test_parse_job_path(): expected = { - "project": "abalone", - "location": "squid", - "lake": "clam", - "task": "whelk", - "job": "octopus", + "project": "mussel", + "location": "winkle", + "lake": "nautilus", + "task": "scallop", + "job": "abalone", } path = DataplexServiceClient.job_path(**expected) @@ -7738,9 +9379,9 @@ def test_parse_job_path(): def test_lake_path(): - project = "oyster" - location = "nudibranch" - lake = "cuttlefish" + project = "squid" + location = "clam" + lake = "whelk" expected = "projects/{project}/locations/{location}/lakes/{lake}".format( project=project, location=location, lake=lake, ) @@ -7750,9 +9391,9 @@ def test_lake_path(): def test_parse_lake_path(): expected = { - "project": "mussel", - "location": "winkle", - "lake": "nautilus", + "project": "octopus", + "location": "oyster", + "lake": "nudibranch", } path = DataplexServiceClient.lake_path(**expected) @@ -7761,11 +9402,45 @@ def test_parse_lake_path(): assert expected == actual +def test_session_path(): + project = "cuttlefish" + location = "mussel" + lake = "winkle" + environment = "nautilus" + session = "scallop" + expected = "projects/{project}/locations/{location}/lakes/{lake}/environments/{environment}/sessions/{session}".format( + project=project, + location=location, + lake=lake, + environment=environment, + session=session, + ) + actual = DataplexServiceClient.session_path( + project, location, lake, environment, session + ) + assert expected == actual + + +def test_parse_session_path(): + expected = { + "project": "abalone", + "location": "squid", + "lake": "clam", + "environment": "whelk", + "session": "octopus", + } + path = DataplexServiceClient.session_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DataplexServiceClient.parse_session_path(path) + assert expected == actual + + def test_task_path(): - project = "scallop" - location = "abalone" - lake = "squid" - task = "clam" + project = "oyster" + location = "nudibranch" + lake = "cuttlefish" + task = "mussel" expected = "projects/{project}/locations/{location}/lakes/{lake}/tasks/{task}".format( project=project, location=location, lake=lake, task=task, ) @@ -7775,10 +9450,10 @@ def test_task_path(): def test_parse_task_path(): expected = { - "project": "whelk", - "location": "octopus", - "lake": "oyster", - "task": "nudibranch", + "project": "winkle", + "location": "nautilus", + "lake": "scallop", + "task": "abalone", } path = DataplexServiceClient.task_path(**expected) @@ -7788,10 +9463,10 @@ def test_parse_task_path(): def test_zone_path(): - project = "cuttlefish" - location = "mussel" - lake = "winkle" - zone = "nautilus" + project = "squid" + location = "clam" + lake = "whelk" + zone = "octopus" expected = "projects/{project}/locations/{location}/lakes/{lake}/zones/{zone}".format( project=project, location=location, lake=lake, zone=zone, ) @@ -7801,10 +9476,10 @@ def test_zone_path(): def test_parse_zone_path(): expected = { - "project": "scallop", - "location": "abalone", - "lake": "squid", - "zone": "clam", + "project": "oyster", + "location": "nudibranch", + "lake": "cuttlefish", + "zone": "mussel", } path = DataplexServiceClient.zone_path(**expected) @@ -7814,7 +9489,7 @@ def test_parse_zone_path(): def test_common_billing_account_path(): - billing_account = "whelk" + billing_account = "winkle" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -7824,7 +9499,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "octopus", + "billing_account": "nautilus", } path = DataplexServiceClient.common_billing_account_path(**expected) @@ -7834,7 +9509,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "oyster" + folder = "scallop" expected = "folders/{folder}".format(folder=folder,) actual = DataplexServiceClient.common_folder_path(folder) assert expected == actual @@ -7842,7 +9517,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "nudibranch", + "folder": "abalone", } path = DataplexServiceClient.common_folder_path(**expected) @@ -7852,7 +9527,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "cuttlefish" + organization = "squid" expected = "organizations/{organization}".format(organization=organization,) actual = DataplexServiceClient.common_organization_path(organization) assert expected == actual @@ -7860,7 +9535,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "mussel", + "organization": "clam", } path = DataplexServiceClient.common_organization_path(**expected) @@ -7870,7 +9545,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "winkle" + project = "whelk" expected = "projects/{project}".format(project=project,) actual = DataplexServiceClient.common_project_path(project) assert expected == actual @@ -7878,7 +9553,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "nautilus", + "project": "octopus", } path = DataplexServiceClient.common_project_path(**expected) @@ -7888,8 +9563,8 @@ def test_parse_common_project_path(): 
def test_common_location_path(): - project = "scallop" - location = "abalone" + project = "oyster" + location = "nudibranch" expected = "projects/{project}/locations/{location}".format( project=project, location=location, ) @@ -7899,8 +9574,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "squid", - "location": "clam", + "project": "cuttlefish", + "location": "mussel", } path = DataplexServiceClient.common_location_path(**expected) diff --git a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_metadata_service.py b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_metadata_service.py index 812ed10b4a81..eaff5ca23aa8 100644 --- a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_metadata_service.py +++ b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_metadata_service.py @@ -644,8 +644,8 @@ def test_metadata_service_client_create_channel_credentials_file( ) -@pytest.mark.parametrize("request_type", [metadata_.GetEntityRequest, dict,]) -def test_get_entity(request_type, transport: str = "grpc"): +@pytest.mark.parametrize("request_type", [metadata_.CreateEntityRequest, dict,]) +def test_create_entity(request_type, transport: str = "grpc"): client = MetadataServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -655,7 +655,7 @@ def test_get_entity(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_entity), "__call__") as call: + with mock.patch.object(type(client.transport.create_entity), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = metadata_.Entity( name="name_value", @@ -670,12 +670,12 @@ def test_get_entity(request_type, transport: str = "grpc"): catalog_entry="catalog_entry_value", system=metadata_.StorageSystem.CLOUD_STORAGE, ) - response = client.get_entity(request) + response = client.create_entity(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == metadata_.GetEntityRequest() + assert args[0] == metadata_.CreateEntityRequest() # Establish that the response is the type that we expect. assert isinstance(response, metadata_.Entity) @@ -692,7 +692,7 @@ def test_get_entity(request_type, transport: str = "grpc"): assert response.system == metadata_.StorageSystem.CLOUD_STORAGE -def test_get_entity_empty_call(): +def test_create_entity_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = MetadataServiceClient( @@ -700,16 +700,16 @@ def test_get_entity_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_entity), "__call__") as call: - client.get_entity() + with mock.patch.object(type(client.transport.create_entity), "__call__") as call: + client.create_entity() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == metadata_.GetEntityRequest() + assert args[0] == metadata_.CreateEntityRequest() @pytest.mark.asyncio -async def test_get_entity_async( - transport: str = "grpc_asyncio", request_type=metadata_.GetEntityRequest +async def test_create_entity_async( + transport: str = "grpc_asyncio", request_type=metadata_.CreateEntityRequest ): client = MetadataServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -720,7 +720,7 @@ async def test_get_entity_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_entity), "__call__") as call: + with mock.patch.object(type(client.transport.create_entity), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( metadata_.Entity( @@ -737,12 +737,12 @@ async def test_get_entity_async( system=metadata_.StorageSystem.CLOUD_STORAGE, ) ) - response = await client.get_entity(request) + response = await client.create_entity(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == metadata_.GetEntityRequest() + assert args[0] == metadata_.CreateEntityRequest() # Establish that the response is the type that we expect. assert isinstance(response, metadata_.Entity) @@ -760,23 +760,23 @@ async def test_get_entity_async( @pytest.mark.asyncio -async def test_get_entity_async_from_dict(): - await test_get_entity_async(request_type=dict) +async def test_create_entity_async_from_dict(): + await test_create_entity_async(request_type=dict) -def test_get_entity_field_headers(): +def test_create_entity_field_headers(): client = MetadataServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = metadata_.GetEntityRequest() + request = metadata_.CreateEntityRequest() - request.name = "name/value" + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_entity), "__call__") as call: + with mock.patch.object(type(client.transport.create_entity), "__call__") as call: call.return_value = metadata_.Entity() - client.get_entity(request) + client.create_entity(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -785,25 +785,25 @@ def test_get_entity_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] @pytest.mark.asyncio -async def test_get_entity_field_headers_async(): +async def test_create_entity_field_headers_async(): client = MetadataServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
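+ # Create RPCs are routed by the parent collection rather than by a resource name, + # which is presumably why the header key switches from name to parent below.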
- request = metadata_.GetEntityRequest() + request = metadata_.CreateEntityRequest() - request.name = "name/value" + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_entity), "__call__") as call: + with mock.patch.object(type(client.transport.create_entity), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.Entity()) - await client.get_entity(request) + await client.create_entity(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -812,67 +812,79 @@ async def test_get_entity_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] -def test_get_entity_flattened(): +def test_create_entity_flattened(): client = MetadataServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_entity), "__call__") as call: + with mock.patch.object(type(client.transport.create_entity), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = metadata_.Entity() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_entity(name="name_value",) + client.create_entity( + parent="parent_value", entity=metadata_.Entity(name="name_value"), + ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].entity + mock_val = metadata_.Entity(name="name_value") assert arg == mock_val -def test_get_entity_flattened_error(): +def test_create_entity_flattened_error(): client = MetadataServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_entity( - metadata_.GetEntityRequest(), name="name_value", + client.create_entity( + metadata_.CreateEntityRequest(), + parent="parent_value", + entity=metadata_.Entity(name="name_value"), ) @pytest.mark.asyncio -async def test_get_entity_flattened_async(): +async def test_create_entity_flattened_async(): client = MetadataServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_entity), "__call__") as call: + with mock.patch.object(type(client.transport.create_entity), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = metadata_.Entity() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.Entity()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_entity(name="name_value",) + response = await client.create_entity( + parent="parent_value", entity=metadata_.Entity(name="name_value"), + ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].entity + mock_val = metadata_.Entity(name="name_value") assert arg == mock_val @pytest.mark.asyncio -async def test_get_entity_flattened_error_async(): +async def test_create_entity_flattened_error_async(): client = MetadataServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -880,13 +892,15 @@ async def test_get_entity_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.get_entity( - metadata_.GetEntityRequest(), name="name_value", + await client.create_entity( + metadata_.CreateEntityRequest(), + parent="parent_value", + entity=metadata_.Entity(name="name_value"), ) -@pytest.mark.parametrize("request_type", [metadata_.ListEntitiesRequest, dict,]) -def test_list_entities(request_type, transport: str = "grpc"): +@pytest.mark.parametrize("request_type", [metadata_.UpdateEntityRequest, dict,]) +def test_update_entity(request_type, transport: str = "grpc"): client = MetadataServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -896,24 +910,44 @@ def test_list_entities(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_entities), "__call__") as call: + with mock.patch.object(type(client.transport.update_entity), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = metadata_.ListEntitiesResponse( - next_page_token="next_page_token_value", + call.return_value = metadata_.Entity( + name="name_value", + display_name="display_name_value", + description="description_value", + id="id_value", + etag="etag_value", + type_=metadata_.Entity.Type.TABLE, + asset="asset_value", + data_path="data_path_value", + data_path_pattern="data_path_pattern_value", + catalog_entry="catalog_entry_value", + system=metadata_.StorageSystem.CLOUD_STORAGE, ) - response = client.list_entities(request) + response = client.update_entity(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == metadata_.ListEntitiesRequest() + assert args[0] == metadata_.UpdateEntityRequest() # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListEntitiesPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, metadata_.Entity) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.id == "id_value" + assert response.etag == "etag_value" + assert response.type_ == metadata_.Entity.Type.TABLE + assert response.asset == "asset_value" + assert response.data_path == "data_path_value" + assert response.data_path_pattern == "data_path_pattern_value" + assert response.catalog_entry == "catalog_entry_value" + assert response.system == metadata_.StorageSystem.CLOUD_STORAGE -def test_list_entities_empty_call(): +def test_update_entity_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = MetadataServiceClient( @@ -921,16 +955,16 @@ def test_list_entities_empty_call(): ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_entities), "__call__") as call: - client.list_entities() + with mock.patch.object(type(client.transport.update_entity), "__call__") as call: + client.update_entity() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == metadata_.ListEntitiesRequest() + assert args[0] == metadata_.UpdateEntityRequest() @pytest.mark.asyncio -async def test_list_entities_async( - transport: str = "grpc_asyncio", request_type=metadata_.ListEntitiesRequest +async def test_update_entity_async( + transport: str = "grpc_asyncio", request_type=metadata_.UpdateEntityRequest ): client = MetadataServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -941,41 +975,63 @@ async def test_list_entities_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_entities), "__call__") as call: + with mock.patch.object(type(client.transport.update_entity), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - metadata_.ListEntitiesResponse(next_page_token="next_page_token_value",) + metadata_.Entity( + name="name_value", + display_name="display_name_value", + description="description_value", + id="id_value", + etag="etag_value", + type_=metadata_.Entity.Type.TABLE, + asset="asset_value", + data_path="data_path_value", + data_path_pattern="data_path_pattern_value", + catalog_entry="catalog_entry_value", + system=metadata_.StorageSystem.CLOUD_STORAGE, + ) ) - response = await client.list_entities(request) + response = await client.update_entity(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == metadata_.ListEntitiesRequest() + assert args[0] == metadata_.UpdateEntityRequest() # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListEntitiesAsyncPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, metadata_.Entity) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.id == "id_value" + assert response.etag == "etag_value" + assert response.type_ == metadata_.Entity.Type.TABLE + assert response.asset == "asset_value" + assert response.data_path == "data_path_value" + assert response.data_path_pattern == "data_path_pattern_value" + assert response.catalog_entry == "catalog_entry_value" + assert response.system == metadata_.StorageSystem.CLOUD_STORAGE @pytest.mark.asyncio -async def test_list_entities_async_from_dict(): - await test_list_entities_async(request_type=dict) +async def test_update_entity_async_from_dict(): + await test_update_entity_async(request_type=dict) -def test_list_entities_field_headers(): +def test_update_entity_field_headers(): client = MetadataServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
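+ # For update RPCs the routing parameter is taken from the nested resource name + # (entity.name), so the expected header below uses the dotted field path.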
- request = metadata_.ListEntitiesRequest() + request = metadata_.UpdateEntityRequest() - request.parent = "parent/value" + request.entity.name = "entity.name/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_entities), "__call__") as call: - call.return_value = metadata_.ListEntitiesResponse() - client.list_entities(request) + with mock.patch.object(type(client.transport.update_entity), "__call__") as call: + call.return_value = metadata_.Entity() + client.update_entity(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -984,27 +1040,25 @@ def test_list_entities_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ("x-goog-request-params", "entity.name=entity.name/value",) in kw["metadata"] @pytest.mark.asyncio -async def test_list_entities_field_headers_async(): +async def test_update_entity_field_headers_async(): client = MetadataServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = metadata_.ListEntitiesRequest() + request = metadata_.UpdateEntityRequest() - request.parent = "parent/value" + request.entity.name = "entity.name/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_entities), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - metadata_.ListEntitiesResponse() - ) - await client.list_entities(request) + with mock.patch.object(type(client.transport.update_entity), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.Entity()) + await client.update_entity(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -1013,54 +1067,619 @@ async def test_list_entities_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ("x-goog-request-params", "entity.name=entity.name/value",) in kw["metadata"] -def test_list_entities_flattened(): +@pytest.mark.parametrize("request_type", [metadata_.DeleteEntityRequest, dict,]) +def test_delete_entity(request_type, transport: str = "grpc"): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_entity), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_entity(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == metadata_.DeleteEntityRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_entity_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_entity), "__call__") as call: + client.delete_entity() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == metadata_.DeleteEntityRequest() + + +@pytest.mark.asyncio +async def test_delete_entity_async( + transport: str = "grpc_asyncio", request_type=metadata_.DeleteEntityRequest +): + client = MetadataServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_entity), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_entity(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == metadata_.DeleteEntityRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_entity_async_from_dict(): + await test_delete_entity_async(request_type=dict) + + +def test_delete_entity_field_headers(): client = MetadataServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metadata_.DeleteEntityRequest() + + request.name = "name/value" + # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_entities), "__call__") as call: + with mock.patch.object(type(client.transport.delete_entity), "__call__") as call: + call.return_value = None + client.delete_entity(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_entity_field_headers_async(): + client = MetadataServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = metadata_.DeleteEntityRequest() + + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_entity), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_entity(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_delete_entity_flattened(): + client = MetadataServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_entity), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = metadata_.ListEntitiesResponse() + call.return_value = None # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_entities(parent="parent_value",) + client.delete_entity(name="name_value",) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_list_entities_flattened_error(): +def test_delete_entity_flattened_error(): client = MetadataServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_entities( - metadata_.ListEntitiesRequest(), parent="parent_value", + client.delete_entity( + metadata_.DeleteEntityRequest(), name="name_value", ) @pytest.mark.asyncio -async def test_list_entities_flattened_async(): +async def test_delete_entity_flattened_async(): client = MetadataServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_entities), "__call__") as call: + with mock.patch.object(type(client.transport.delete_entity), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = metadata_.ListEntitiesResponse() + call.return_value = None - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - metadata_.ListEntitiesResponse() - ) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_entity(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_entity_flattened_error_async(): + client = MetadataServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_entity( + metadata_.DeleteEntityRequest(), name="name_value", + ) + + +@pytest.mark.parametrize("request_type", [metadata_.GetEntityRequest, dict,]) +def test_get_entity(request_type, transport: str = "grpc"): + client = MetadataServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(type(client.transport.get_entity), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = metadata_.Entity(
+            name="name_value",
+            display_name="display_name_value",
+            description="description_value",
+            id="id_value",
+            etag="etag_value",
+            type_=metadata_.Entity.Type.TABLE,
+            asset="asset_value",
+            data_path="data_path_value",
+            data_path_pattern="data_path_pattern_value",
+            catalog_entry="catalog_entry_value",
+            system=metadata_.StorageSystem.CLOUD_STORAGE,
+        )
+        response = client.get_entity(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == metadata_.GetEntityRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, metadata_.Entity)
+    assert response.name == "name_value"
+    assert response.display_name == "display_name_value"
+    assert response.description == "description_value"
+    assert response.id == "id_value"
+    assert response.etag == "etag_value"
+    assert response.type_ == metadata_.Entity.Type.TABLE
+    assert response.asset == "asset_value"
+    assert response.data_path == "data_path_value"
+    assert response.data_path_pattern == "data_path_pattern_value"
+    assert response.catalog_entry == "catalog_entry_value"
+    assert response.system == metadata_.StorageSystem.CLOUD_STORAGE
+
+
+def test_get_entity_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = MetadataServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_entity), "__call__") as call:
+        client.get_entity()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == metadata_.GetEntityRequest()
+
+
+@pytest.mark.asyncio
+async def test_get_entity_async(
+    transport: str = "grpc_asyncio", request_type=metadata_.GetEntityRequest
+):
+    client = MetadataServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_entity), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            metadata_.Entity(
+                name="name_value",
+                display_name="display_name_value",
+                description="description_value",
+                id="id_value",
+                etag="etag_value",
+                type_=metadata_.Entity.Type.TABLE,
+                asset="asset_value",
+                data_path="data_path_value",
+                data_path_pattern="data_path_pattern_value",
+                catalog_entry="catalog_entry_value",
+                system=metadata_.StorageSystem.CLOUD_STORAGE,
+            )
+        )
+        response = await client.get_entity(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == metadata_.GetEntityRequest()
+
+    # Establish that the response is the type that we expect.
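+    # [Editorial note] FakeUnaryUnaryCall (from google.api_core's
+    # grpc_helpers_async test helpers) wraps the Entity above in an awaitable
+    # that mimics a grpc.aio unary-unary call, so the awaited response here
+    # resolves to that Entity and satisfies the assertions below.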
+    assert isinstance(response, metadata_.Entity)
+    assert response.name == "name_value"
+    assert response.display_name == "display_name_value"
+    assert response.description == "description_value"
+    assert response.id == "id_value"
+    assert response.etag == "etag_value"
+    assert response.type_ == metadata_.Entity.Type.TABLE
+    assert response.asset == "asset_value"
+    assert response.data_path == "data_path_value"
+    assert response.data_path_pattern == "data_path_pattern_value"
+    assert response.catalog_entry == "catalog_entry_value"
+    assert response.system == metadata_.StorageSystem.CLOUD_STORAGE
+
+
+@pytest.mark.asyncio
+async def test_get_entity_async_from_dict():
+    await test_get_entity_async(request_type=dict)
+
+
+def test_get_entity_field_headers():
+    client = MetadataServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = metadata_.GetEntityRequest()
+
+    request.name = "name/value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_entity), "__call__") as call:
+        call.return_value = metadata_.Entity()
+        client.get_entity(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_get_entity_field_headers_async():
+    client = MetadataServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = metadata_.GetEntityRequest()
+
+    request.name = "name/value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_entity), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.Entity())
+        await client.get_entity(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
+
+
+def test_get_entity_flattened():
+    client = MetadataServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_entity), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = metadata_.Entity()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.get_entity(name="name_value",)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+def test_get_entity_flattened_error():
+    client = MetadataServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_entity(
+            metadata_.GetEntityRequest(), name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_get_entity_flattened_async():
+    client = MetadataServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_entity), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = metadata_.Entity()
+
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.Entity())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.get_entity(name="name_value",)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_get_entity_flattened_error_async():
+    client = MetadataServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.get_entity(
+            metadata_.GetEntityRequest(), name="name_value",
+        )
+
+
+@pytest.mark.parametrize("request_type", [metadata_.ListEntitiesRequest, dict,])
+def test_list_entities(request_type, transport: str = "grpc"):
+    client = MetadataServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_entities), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = metadata_.ListEntitiesResponse(
+            next_page_token="next_page_token_value",
+        )
+        response = client.list_entities(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == metadata_.ListEntitiesRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListEntitiesPager)
+    assert response.next_page_token == "next_page_token_value"
+
+
+def test_list_entities_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = MetadataServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_entities), "__call__") as call:
+        client.list_entities()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == metadata_.ListEntitiesRequest()
+
+
+@pytest.mark.asyncio
+async def test_list_entities_async(
+    transport: str = "grpc_asyncio", request_type=metadata_.ListEntitiesRequest
+):
+    client = MetadataServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_entities), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            metadata_.ListEntitiesResponse(next_page_token="next_page_token_value",)
+        )
+        response = await client.list_entities(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == metadata_.ListEntitiesRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListEntitiesAsyncPager)
+    assert response.next_page_token == "next_page_token_value"
+
+
+@pytest.mark.asyncio
+async def test_list_entities_async_from_dict():
+    await test_list_entities_async(request_type=dict)
+
+
+def test_list_entities_field_headers():
+    client = MetadataServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = metadata_.ListEntitiesRequest()
+
+    request.parent = "parent/value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_entities), "__call__") as call:
+        call.return_value = metadata_.ListEntitiesResponse()
+        client.list_entities(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_list_entities_field_headers_async():
+    client = MetadataServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = metadata_.ListEntitiesRequest()
+
+    request.parent = "parent/value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_entities), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            metadata_.ListEntitiesResponse()
+        )
+        await client.list_entities(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
+
+
+def test_list_entities_flattened():
+    client = MetadataServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_entities), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = metadata_.ListEntitiesResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.list_entities(parent="parent_value",)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
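+        # [Editorial note] The flattened `parent` keyword is coerced by the
+        # client into a request message, so the field-by-field checks below
+        # are equivalent to the single (hypothetical) assertion:
+        #
+        #   assert args[0] == metadata_.ListEntitiesRequest(parent="parent_value")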
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+def test_list_entities_flattened_error():
+    client = MetadataServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_entities(
+            metadata_.ListEntitiesRequest(), parent="parent_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_list_entities_flattened_async():
+    client = MetadataServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_entities), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = metadata_.ListEntitiesResponse()
+
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            metadata_.ListEntitiesResponse()
+        )
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
         response = await client.list_entities(parent="parent_value",)


@@ -1075,7 +1694,362 @@ async def test_list_entities_flattened_async():

 @pytest.mark.asyncio
-async def test_list_entities_flattened_error_async():
+async def test_list_entities_flattened_error_async():
+    client = MetadataServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_entities(
+            metadata_.ListEntitiesRequest(), parent="parent_value",
+        )
+
+
+def test_list_entities_pager(transport_name: str = "grpc"):
+    client = MetadataServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_entities), "__call__") as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            metadata_.ListEntitiesResponse(
+                entities=[metadata_.Entity(), metadata_.Entity(), metadata_.Entity(),],
+                next_page_token="abc",
+            ),
+            metadata_.ListEntitiesResponse(entities=[], next_page_token="def",),
+            metadata_.ListEntitiesResponse(
+                entities=[metadata_.Entity(),], next_page_token="ghi",
+            ),
+            metadata_.ListEntitiesResponse(
+                entities=[metadata_.Entity(), metadata_.Entity(),],
+            ),
+            RuntimeError,
+        )
+
+        metadata = ()
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
+        )
+        pager = client.list_entities(request={})
+
+        assert pager._metadata == metadata
+
+        results = [i for i in pager]
+        assert len(results) == 6
+        assert all(isinstance(i, metadata_.Entity) for i in results)
+
+
+def test_list_entities_pages(transport_name: str = "grpc"):
+    client = MetadataServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_entities), "__call__") as call:
+        # Set the response to a series of pages.
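+        # [Editorial note] With side_effect, each RPC invocation consumes the
+        # next ListEntitiesResponse in order; the pager keeps fetching while a
+        # page carries a next_page_token, and the trailing RuntimeError would
+        # only surface if an unexpected fifth call were made.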
+        call.side_effect = (
+            metadata_.ListEntitiesResponse(
+                entities=[metadata_.Entity(), metadata_.Entity(), metadata_.Entity(),],
+                next_page_token="abc",
+            ),
+            metadata_.ListEntitiesResponse(entities=[], next_page_token="def",),
+            metadata_.ListEntitiesResponse(
+                entities=[metadata_.Entity(),], next_page_token="ghi",
+            ),
+            metadata_.ListEntitiesResponse(
+                entities=[metadata_.Entity(), metadata_.Entity(),],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_entities(request={}).pages)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.asyncio
+async def test_list_entities_async_pager():
+    client = MetadataServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_entities), "__call__", new_callable=mock.AsyncMock
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            metadata_.ListEntitiesResponse(
+                entities=[metadata_.Entity(), metadata_.Entity(), metadata_.Entity(),],
+                next_page_token="abc",
+            ),
+            metadata_.ListEntitiesResponse(entities=[], next_page_token="def",),
+            metadata_.ListEntitiesResponse(
+                entities=[metadata_.Entity(),], next_page_token="ghi",
+            ),
+            metadata_.ListEntitiesResponse(
+                entities=[metadata_.Entity(), metadata_.Entity(),],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_entities(request={},)
+        assert async_pager.next_page_token == "abc"
+        responses = []
+        async for response in async_pager:
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(isinstance(i, metadata_.Entity) for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_entities_async_pages():
+    client = MetadataServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_entities), "__call__", new_callable=mock.AsyncMock
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            metadata_.ListEntitiesResponse(
+                entities=[metadata_.Entity(), metadata_.Entity(), metadata_.Entity(),],
+                next_page_token="abc",
+            ),
+            metadata_.ListEntitiesResponse(entities=[], next_page_token="def",),
+            metadata_.ListEntitiesResponse(
+                entities=[metadata_.Entity(),], next_page_token="ghi",
+            ),
+            metadata_.ListEntitiesResponse(
+                entities=[metadata_.Entity(), metadata_.Entity(),],
+            ),
+            RuntimeError,
+        )
+        pages = []
+        async for page_ in (await client.list_entities(request={})).pages:
+            pages.append(page_)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.parametrize("request_type", [metadata_.CreatePartitionRequest, dict,])
+def test_create_partition(request_type, transport: str = "grpc"):
+    client = MetadataServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.create_partition), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = metadata_.Partition(
+            name="name_value",
+            values=["values_value"],
+            location="location_value",
+            etag="etag_value",
+        )
+        response = client.create_partition(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == metadata_.CreatePartitionRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, metadata_.Partition)
+    assert response.name == "name_value"
+    assert response.values == ["values_value"]
+    assert response.location == "location_value"
+    assert response.etag == "etag_value"
+
+
+def test_create_partition_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = MetadataServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.create_partition), "__call__") as call:
+        client.create_partition()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == metadata_.CreatePartitionRequest()
+
+
+@pytest.mark.asyncio
+async def test_create_partition_async(
+    transport: str = "grpc_asyncio", request_type=metadata_.CreatePartitionRequest
+):
+    client = MetadataServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.create_partition), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            metadata_.Partition(
+                name="name_value",
+                values=["values_value"],
+                location="location_value",
+                etag="etag_value",
+            )
+        )
+        response = await client.create_partition(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == metadata_.CreatePartitionRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, metadata_.Partition)
+    assert response.name == "name_value"
+    assert response.values == ["values_value"]
+    assert response.location == "location_value"
+    assert response.etag == "etag_value"
+
+
+@pytest.mark.asyncio
+async def test_create_partition_async_from_dict():
+    await test_create_partition_async(request_type=dict)
+
+
+def test_create_partition_field_headers():
+    client = MetadataServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = metadata_.CreatePartitionRequest()
+
+    request.parent = "parent/value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.create_partition), "__call__") as call:
+        call.return_value = metadata_.Partition()
+        client.create_partition(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_create_partition_field_headers_async():
+    client = MetadataServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = metadata_.CreatePartitionRequest()
+
+    request.parent = "parent/value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.create_partition), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.Partition())
+        await client.create_partition(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
+
+
+def test_create_partition_flattened():
+    client = MetadataServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.create_partition), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = metadata_.Partition()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.create_partition(
+            parent="parent_value", partition=metadata_.Partition(name="name_value"),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+        arg = args[0].partition
+        mock_val = metadata_.Partition(name="name_value")
+        assert arg == mock_val
+
+
+def test_create_partition_flattened_error():
+    client = MetadataServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.create_partition(
+            metadata_.CreatePartitionRequest(),
+            parent="parent_value",
+            partition=metadata_.Partition(name="name_value"),
+        )
+
+
+@pytest.mark.asyncio
+async def test_create_partition_flattened_async():
+    client = MetadataServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.create_partition), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = metadata_.Partition()
+
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(metadata_.Partition())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.create_partition(
+            parent="parent_value", partition=metadata_.Partition(name="name_value"),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
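+        # [Editorial note] Message-typed flattened fields are copied into the
+        # request unchanged, so args[0].partition below compares equal (by
+        # proto field equality) to the Partition passed in above.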
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+        arg = args[0].partition
+        mock_val = metadata_.Partition(name="name_value")
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_create_partition_flattened_error_async():
     client = MetadataServiceAsyncClient(
         credentials=ga_credentials.AnonymousCredentials(),
     )

@@ -1083,139 +2057,204 @@ async def test_list_entities_flattened_error_async():
     # Attempting to call a method with both a request object and flattened
     # fields is an error.
     with pytest.raises(ValueError):
-        await client.list_entities(
-            metadata_.ListEntitiesRequest(), parent="parent_value",
+        await client.create_partition(
+            metadata_.CreatePartitionRequest(),
+            parent="parent_value",
+            partition=metadata_.Partition(name="name_value"),
         )


-def test_list_entities_pager(transport_name: str = "grpc"):
+@pytest.mark.parametrize("request_type", [metadata_.DeletePartitionRequest, dict,])
+def test_delete_partition(request_type, transport: str = "grpc"):
     client = MetadataServiceClient(
-        credentials=ga_credentials.AnonymousCredentials, transport=transport_name,
+        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
     )

-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.list_entities), "__call__") as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            metadata_.ListEntitiesResponse(
-                entities=[metadata_.Entity(), metadata_.Entity(), metadata_.Entity(),],
-                next_page_token="abc",
-            ),
-            metadata_.ListEntitiesResponse(entities=[], next_page_token="def",),
-            metadata_.ListEntitiesResponse(
-                entities=[metadata_.Entity(),], next_page_token="ghi",
-            ),
-            metadata_.ListEntitiesResponse(
-                entities=[metadata_.Entity(), metadata_.Entity(),],
-            ),
-            RuntimeError,
-        )
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()

-        metadata = ()
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
-        )
-        pager = client.list_entities(request={})
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.delete_partition), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = None
+        response = client.delete_partition(request)

-        assert pager._metadata == metadata
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == metadata_.DeletePartitionRequest()

-        results = [i for i in pager]
-        assert len(results) == 6
-        assert all(isinstance(i, metadata_.Entity) for i in results)
+    # Establish that the response is the type that we expect.
+    assert response is None


-def test_list_entities_pages(transport_name: str = "grpc"):
+def test_delete_partition_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
     client = MetadataServiceClient(
-        credentials=ga_credentials.AnonymousCredentials, transport=transport_name,
+        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
     )

     # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client.transport.list_entities), "__call__") as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            metadata_.ListEntitiesResponse(
-                entities=[metadata_.Entity(), metadata_.Entity(), metadata_.Entity(),],
-                next_page_token="abc",
-            ),
-            metadata_.ListEntitiesResponse(entities=[], next_page_token="def",),
-            metadata_.ListEntitiesResponse(
-                entities=[metadata_.Entity(),], next_page_token="ghi",
-            ),
-            metadata_.ListEntitiesResponse(
-                entities=[metadata_.Entity(), metadata_.Entity(),],
-            ),
-            RuntimeError,
-        )
-        pages = list(client.list_entities(request={}).pages)
-        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
-            assert page_.raw_page.next_page_token == token
+    with mock.patch.object(type(client.transport.delete_partition), "__call__") as call:
+        client.delete_partition()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == metadata_.DeletePartitionRequest()


 @pytest.mark.asyncio
-async def test_list_entities_async_pager():
+async def test_delete_partition_async(
+    transport: str = "grpc_asyncio", request_type=metadata_.DeletePartitionRequest
+):
     client = MetadataServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials,
+        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
     )

+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
     # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-        type(client.transport.list_entities), "__call__", new_callable=mock.AsyncMock
-    ) as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            metadata_.ListEntitiesResponse(
-                entities=[metadata_.Entity(), metadata_.Entity(), metadata_.Entity(),],
-                next_page_token="abc",
-            ),
-            metadata_.ListEntitiesResponse(entities=[], next_page_token="def",),
-            metadata_.ListEntitiesResponse(
-                entities=[metadata_.Entity(),], next_page_token="ghi",
-            ),
-            metadata_.ListEntitiesResponse(
-                entities=[metadata_.Entity(), metadata_.Entity(),],
-            ),
-            RuntimeError,
-        )
-        async_pager = await client.list_entities(request={},)
-        assert async_pager.next_page_token == "abc"
-        responses = []
-        async for response in async_pager:
-            responses.append(response)
+    with mock.patch.object(type(client.transport.delete_partition), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        response = await client.delete_partition(request)

-        assert len(responses) == 6
-        assert all(isinstance(i, metadata_.Entity) for i in responses)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == metadata_.DeletePartitionRequest()
+
+    # Establish that the response is the type that we expect.
+    assert response is None


 @pytest.mark.asyncio
-async def test_list_entities_async_pages():
+async def test_delete_partition_async_from_dict():
+    await test_delete_partition_async(request_type=dict)
+
+
+def test_delete_partition_field_headers():
+    client = MetadataServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = metadata_.DeletePartitionRequest()
+
+    request.name = "name/value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.delete_partition), "__call__") as call:
+        call.return_value = None
+        client.delete_partition(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_delete_partition_field_headers_async():
     client = MetadataServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials,
+        credentials=ga_credentials.AnonymousCredentials(),
     )

+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = metadata_.DeletePartitionRequest()
+
+    request.name = "name/value"
+
     # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-        type(client.transport.list_entities), "__call__", new_callable=mock.AsyncMock
-    ) as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            metadata_.ListEntitiesResponse(
-                entities=[metadata_.Entity(), metadata_.Entity(), metadata_.Entity(),],
-                next_page_token="abc",
-            ),
-            metadata_.ListEntitiesResponse(entities=[], next_page_token="def",),
-            metadata_.ListEntitiesResponse(
-                entities=[metadata_.Entity(),], next_page_token="ghi",
-            ),
-            metadata_.ListEntitiesResponse(
-                entities=[metadata_.Entity(), metadata_.Entity(),],
-            ),
-            RuntimeError,
+    with mock.patch.object(type(client.transport.delete_partition), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        await client.delete_partition(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
+
+
+def test_delete_partition_flattened():
+    client = MetadataServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.delete_partition), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = None
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.delete_partition(name="name_value",)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+def test_delete_partition_flattened_error():
+    client = MetadataServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.delete_partition(
+            metadata_.DeletePartitionRequest(), name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_delete_partition_flattened_async():
+    client = MetadataServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.delete_partition), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = None
+
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.delete_partition(name="name_value",)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_delete_partition_flattened_error_async():
+    client = MetadataServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.delete_partition(
+            metadata_.DeletePartitionRequest(), name="name_value",
         )
-        pages = []
-        async for page_ in (await client.list_entities(request={})).pages:
-            pages.append(page_)
-        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
-            assert page_.raw_page.next_page_token == token


 @pytest.mark.parametrize("request_type", [metadata_.GetPartitionRequest, dict,])
@@ -1894,8 +2933,13 @@ def test_metadata_service_base_transport():
     # Every method on the transport should just blindly
     # raise NotImplementedError.
     methods = (
+        "create_entity",
+        "update_entity",
+        "delete_entity",
         "get_entity",
         "list_entities",
+        "create_partition",
+        "delete_partition",
         "get_partition",
         "list_partitions",
     )
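+    # [Editorial sketch, not part of this diff] The generated check that
+    # typically follows asserts that every stub on the abstract base
+    # transport raises NotImplementedError; assuming the surrounding test's
+    # `transport` object, it would look roughly like:
+    #
+    #   for method in methods:
+    #       with pytest.raises(NotImplementedError):
+    #           getattr(transport, method)(request=object())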