From b320a3418662cdd7a6096e252ec258ae851e31aa Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 29 May 2023 09:44:36 -0400 Subject: [PATCH] feat: add support for entries associated with Spanner and CloudBigTable (#475) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: add support for entries associated with Spanner and CloudBigTable feat: expand SearchCatalogResponse with totalSize feat: modify documentation for FQN support feat: extend ImportApiRequest with jobId parameter PiperOrigin-RevId: 536127089 Source-Link: https://github.com/googleapis/googleapis/commit/208fb58cd6f412fd9b9438ce8dcc13ea5b307a45 Source-Link: https://github.com/googleapis/googleapis-gen/commit/df299c5888ab0f55f1dad63918ac88b2b15ba4e2 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiZGYyOTljNTg4OGFiMGY1NWYxZGFkNjM5MThhYzg4YjJiMTViYTRlMiJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- .../google/cloud/datacatalog/__init__.py | 6 + .../google/cloud/datacatalog_v1/__init__.py | 6 + .../services/data_catalog/async_client.py | 99 +--------- .../services/data_catalog/transports/base.py | 99 +--------- .../cloud/datacatalog_v1/types/__init__.py | 6 + .../cloud/datacatalog_v1/types/common.py | 6 + .../cloud/datacatalog_v1/types/datacatalog.py | 180 ++++++++++++++++-- .../datacatalog_v1/types/dump_content.py | 12 +- .../datacatalog_v1/types/policytagmanager.py | 2 +- ..._metadata_google.cloud.datacatalog.v1.json | 2 +- ...data_google.cloud.datacatalog.v1beta1.json | 2 +- .../scripts/fixup_datacatalog_v1_keywords.py | 4 +- .../gapic/datacatalog_v1/test_data_catalog.py | 4 + 13 files changed, 217 insertions(+), 211 deletions(-) diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog/__init__.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog/__init__.py index 0932a07f05c1..28f059e99545 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog/__init__.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog/__init__.py @@ -47,6 +47,8 @@ from google.cloud.datacatalog_v1.types.data_source import DataSource, StorageProperties from google.cloud.datacatalog_v1.types.datacatalog import ( BusinessContext, + CloudBigtableInstanceSpec, + CloudBigtableSystemSpec, Contacts, CreateEntryGroupRequest, CreateEntryRequest, @@ -89,6 +91,7 @@ RoutineSpec, SearchCatalogRequest, SearchCatalogResponse, + ServiceSpec, SqlDatabaseSystemSpec, StarEntryRequest, StarEntryResponse, @@ -181,6 +184,8 @@ "DataSource", "StorageProperties", "BusinessContext", + "CloudBigtableInstanceSpec", + "CloudBigtableSystemSpec", "Contacts", "CreateEntryGroupRequest", "CreateEntryRequest", @@ -222,6 +227,7 @@ "RoutineSpec", "SearchCatalogRequest", "SearchCatalogResponse", + "ServiceSpec", "SqlDatabaseSystemSpec", "StarEntryRequest", "StarEntryResponse", diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/__init__.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/__init__.py index 661af336fd2e..6488aa363e64 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/__init__.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/__init__.py @@ -36,6 +36,8 @@ from .types.data_source import DataSource, StorageProperties from .types.datacatalog import ( BusinessContext, +
CloudBigtableInstanceSpec, + CloudBigtableSystemSpec, Contacts, CreateEntryGroupRequest, CreateEntryRequest, @@ -78,6 +80,7 @@ RoutineSpec, SearchCatalogRequest, SearchCatalogResponse, + ServiceSpec, SqlDatabaseSystemSpec, StarEntryRequest, StarEntryResponse, @@ -147,6 +150,8 @@ "BigQueryRoutineSpec", "BigQueryTableSpec", "BusinessContext", + "CloudBigtableInstanceSpec", + "CloudBigtableSystemSpec", "CloudSqlBigQueryConnectionSpec", "ColumnSchema", "CommonUsageStats", @@ -231,6 +236,7 @@ "SearchResultType", "SerializedPolicyTag", "SerializedTaxonomy", + "ServiceSpec", "SqlDatabaseSystemSpec", "StarEntryRequest", "StarEntryResponse", diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/data_catalog/async_client.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/data_catalog/async_client.py index b64dadd1662c..618c7e844157 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/data_catalog/async_client.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/data_catalog/async_client.py @@ -371,16 +371,7 @@ async def sample_search_catalog(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.search_catalog, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, + default_timeout=None, client_info=DEFAULT_CLIENT_INFO, ) @@ -661,16 +652,7 @@ async def sample_get_entry_group(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.get_entry_group, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, + default_timeout=None, client_info=DEFAULT_CLIENT_INFO, ) @@ -998,16 +980,7 @@ async def sample_list_entry_groups(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.list_entry_groups, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, + default_timeout=None, client_info=DEFAULT_CLIENT_INFO, ) @@ -1541,16 +1514,7 @@ async def sample_get_entry(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.get_entry, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, + default_timeout=None, client_info=DEFAULT_CLIENT_INFO, ) @@ -1641,16 +1605,7 @@ async def sample_lookup_entry(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.lookup_entry, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, + default_timeout=None, client_info=DEFAULT_CLIENT_INFO, ) @@ -1755,16 +1710,7 @@ async def sample_list_entries(): # and friendly error handling. 
rpc = gapic_v1.method_async.wrap_method( self._client._transport.list_entries, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, + default_timeout=None, client_info=DEFAULT_CLIENT_INFO, ) @@ -3554,16 +3500,7 @@ async def sample_list_tags(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.list_tags, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, + default_timeout=None, client_info=DEFAULT_CLIENT_INFO, ) @@ -4262,16 +4199,7 @@ async def sample_get_iam_policy(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.get_iam_policy, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, + default_timeout=None, client_info=DEFAULT_CLIENT_INFO, ) @@ -4473,16 +4401,7 @@ async def sample_import_entries(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.import_entries, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, + default_timeout=None, client_info=DEFAULT_CLIENT_INFO, ) diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/data_catalog/transports/base.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/data_catalog/transports/base.py index 359235fa9ef1..ac172bf90163 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/data_catalog/transports/base.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/data_catalog/transports/base.py @@ -128,16 +128,7 @@ def _prep_wrapped_messages(self, client_info): self._wrapped_methods = { self.search_catalog: gapic_v1.method.wrap_method( self.search_catalog, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, + default_timeout=None, client_info=client_info, ), self.create_entry_group: gapic_v1.method.wrap_method( @@ -147,16 +138,7 @@ def _prep_wrapped_messages(self, client_info): ), self.get_entry_group: gapic_v1.method.wrap_method( self.get_entry_group, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, + default_timeout=None, client_info=client_info, ), self.update_entry_group: gapic_v1.method.wrap_method( @@ -171,16 +153,7 @@ def _prep_wrapped_messages(self, client_info): ), self.list_entry_groups: gapic_v1.method.wrap_method( self.list_entry_groups, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, + default_timeout=None, client_info=client_info, ), self.create_entry: gapic_v1.method.wrap_method( @@ -200,44 +173,17 @@ def 
_prep_wrapped_messages(self, client_info): ), self.get_entry: gapic_v1.method.wrap_method( self.get_entry, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, + default_timeout=None, client_info=client_info, ), self.lookup_entry: gapic_v1.method.wrap_method( self.lookup_entry, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, + default_timeout=None, client_info=client_info, ), self.list_entries: gapic_v1.method.wrap_method( self.list_entries, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, + default_timeout=None, client_info=client_info, ), self.modify_entry_overview: gapic_v1.method.wrap_method( @@ -312,16 +258,7 @@ def _prep_wrapped_messages(self, client_info): ), self.list_tags: gapic_v1.method.wrap_method( self.list_tags, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, + default_timeout=None, client_info=client_info, ), self.reconcile_tags: gapic_v1.method.wrap_method( @@ -346,16 +283,7 @@ def _prep_wrapped_messages(self, client_info): ), self.get_iam_policy: gapic_v1.method.wrap_method( self.get_iam_policy, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, + default_timeout=None, client_info=client_info, ), self.test_iam_permissions: gapic_v1.method.wrap_method( @@ -365,16 +293,7 @@ def _prep_wrapped_messages(self, client_info): ), self.import_entries: gapic_v1.method.wrap_method( self.import_entries, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, + default_timeout=None, client_info=client_info, ), } diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/types/__init__.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/types/__init__.py index b3b934a797ea..4738765c79e0 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/types/__init__.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/types/__init__.py @@ -22,6 +22,8 @@ from .data_source import DataSource, StorageProperties from .datacatalog import ( BusinessContext, + CloudBigtableInstanceSpec, + CloudBigtableSystemSpec, Contacts, CreateEntryGroupRequest, CreateEntryRequest, @@ -64,6 +66,7 @@ RoutineSpec, SearchCatalogRequest, SearchCatalogResponse, + ServiceSpec, SqlDatabaseSystemSpec, StarEntryRequest, StarEntryResponse, @@ -134,6 +137,8 @@ "DataSource", "StorageProperties", "BusinessContext", + "CloudBigtableInstanceSpec", + "CloudBigtableSystemSpec", "Contacts", "CreateEntryGroupRequest", "CreateEntryRequest", @@ -175,6 +180,7 @@ "RoutineSpec", "SearchCatalogRequest", "SearchCatalogResponse", + "ServiceSpec", "SqlDatabaseSystemSpec", "StarEntryRequest", "StarEntryResponse", diff --git 
a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/types/common.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/types/common.py index 15706f2afdaa..6fd09eaa4a40 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/types/common.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/types/common.py @@ -45,6 +45,10 @@ class IntegratedSystem(proto.Enum): Dataproc Metastore. DATAPLEX (4): Dataplex. + CLOUD_SPANNER (6): + Cloud Spanner + CLOUD_BIGTABLE (7): + Cloud Bigtable CLOUD_SQL (8): Cloud Sql LOOKER (9): @@ -55,6 +59,8 @@ class IntegratedSystem(proto.Enum): CLOUD_PUBSUB = 2 DATAPROC_METASTORE = 3 DATAPLEX = 4 + CLOUD_SPANNER = 6 + CLOUD_BIGTABLE = 7 CLOUD_SQL = 8 LOOKER = 9 diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/types/datacatalog.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/types/datacatalog.py index 6813b31b656a..4ea9a2260532 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/types/datacatalog.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/types/datacatalog.py @@ -54,6 +54,9 @@ "RoutineSpec", "SqlDatabaseSystemSpec", "LookerSystemSpec", + "CloudBigtableSystemSpec", + "CloudBigtableInstanceSpec", + "ServiceSpec", "BusinessContext", "EntryOverview", "Contacts", @@ -320,6 +323,9 @@ class SearchCatalogResponse(proto.Message): Attributes: results (MutableSequence[google.cloud.datacatalog_v1.types.SearchCatalogResult]): Search results. + total_size (int): + The approximate total number of entries + matched by the query. next_page_token (str): Pagination token that can be used in subsequent calls to retrieve the next page of @@ -343,6 +349,10 @@ def raw_page(self): number=1, message=search.SearchCatalogResult, ) + total_size: int = proto.Field( + proto.INT32, + number=2, + ) next_page_token: str = proto.Field( proto.STRING, number=3, @@ -685,7 +695,9 @@ class LookupEntryRequest(proto.Message): This field is a member of `oneof`_ ``target_name``. fully_qualified_name (str): - Fully qualified name (FQN) of the resource. + `Fully Qualified Name + (FQN) `__ + of the resource. FQNs take two forms: @@ -702,6 +714,16 @@ class LookupEntryRequest(proto.Message): ``dataproc_metastore:{PROJECT_ID}.{LOCATION_ID}.{INSTANCE_ID}.{DATABASE_ID}.{TABLE_ID}`` This field is a member of `oneof`_ ``target_name``. + project (str): + Project where the lookup should be performed. Required to + lookup entry that is not a part of ``DPMS`` or ``DATAPLEX`` + ``integrated_system`` using its ``fully_qualified_name``. + Ignored in other cases. + location (str): + Location where the lookup should be performed. Required to + lookup entry that is not a part of ``DPMS`` or ``DATAPLEX`` + ``integrated_system`` using its ``fully_qualified_name``. + Ignored in other cases. """ linked_resource: str = proto.Field( @@ -719,6 +741,14 @@ class LookupEntryRequest(proto.Message): number=5, oneof="target_name", ) + project: str = proto.Field( + proto.STRING, + number=6, + ) + location: str = proto.Field( + proto.STRING, + number=7, + ) class Entry(proto.Message): @@ -768,25 +798,12 @@ class Entry(proto.Message): slashes (/), dashes (-), and hashes (#). The maximum size is 200 bytes when encoded in UTF-8. fully_qualified_name (str): - Fully qualified name (FQN) of the resource. Set - automatically for entries representing resources from synced - systems. Settable only during creation and read-only - afterwards. Can be used for search and lookup of the - entries. 
- - FQNs take two forms: - - - For non-regionalized resources: - - ``{SYSTEM}:{PROJECT}.{PATH_TO_RESOURCE_SEPARATED_WITH_DOTS}`` - - - For regionalized resources: - - ``{SYSTEM}:{PROJECT}.{LOCATION_ID}.{PATH_TO_RESOURCE_SEPARATED_WITH_DOTS}`` - - Example for a DPMS table: - - ``dataproc_metastore:{PROJECT_ID}.{LOCATION_ID}.{INSTANCE_ID}.{DATABASE_ID}.{TABLE_ID}`` + `Fully Qualified Name + (FQN) `__ + of the resource. Set automatically for entries representing + resources from synced systems. Settable only during + creation, and read-only later. Can be used for search and + lookup of the entries. type_ (google.cloud.datacatalog_v1.types.EntryType): The type of the entry. Only used for entries with types listed in the ``EntryType`` enum. @@ -844,6 +861,12 @@ class Entry(proto.Message): Specification that applies to Looker sysstem. Only settable when ``user_specified_system`` is equal to ``LOOKER`` + This field is a member of `oneof`_ ``system_spec``. + cloud_bigtable_system_spec (google.cloud.datacatalog_v1.types.CloudBigtableSystemSpec): + Specification that applies to Cloud Bigtable system. Only + settable when ``integrated_system`` is equal to + ``CLOUD_BIGTABLE`` + This field is a member of `oneof`_ ``system_spec``. gcs_fileset_spec (google.cloud.datacatalog_v1.types.GcsFilesetSpec): Specification that applies to a Cloud Storage fileset. Valid @@ -884,6 +907,11 @@ class Entry(proto.Message): Specification that applies to a fileset resource. Valid only for entries with the ``FILESET`` type. + This field is a member of `oneof`_ ``spec``. + service_spec (google.cloud.datacatalog_v1.types.ServiceSpec): + Specification that applies to a Service + resource. + This field is a member of `oneof`_ ``spec``. display_name (str): Display name of an entry. @@ -974,6 +1002,12 @@ class Entry(proto.Message): oneof="system_spec", message="LookerSystemSpec", ) + cloud_bigtable_system_spec: "CloudBigtableSystemSpec" = proto.Field( + proto.MESSAGE, + number=41, + oneof="system_spec", + message="CloudBigtableSystemSpec", + ) gcs_fileset_spec: gcd_gcs_fileset_spec.GcsFilesetSpec = proto.Field( proto.MESSAGE, number=6, @@ -1016,6 +1050,12 @@ class Entry(proto.Message): oneof="spec", message="FilesetSpec", ) + service_spec: "ServiceSpec" = proto.Field( + proto.MESSAGE, + number=42, + oneof="spec", + message="ServiceSpec", + ) display_name: str = proto.Field( proto.STRING, number=3, @@ -1403,6 +1443,98 @@ class LookerSystemSpec(proto.Message): ) +class CloudBigtableSystemSpec(proto.Message): + r"""Specification that applies to all entries that are part of + ``CLOUD_BIGTABLE`` system (user_specified_type) + + Attributes: + instance_display_name (str): + Display name of the Instance. This is user + specified and different from the resource name. + """ + + instance_display_name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CloudBigtableInstanceSpec(proto.Message): + r"""Specification that applies to Instance entries that are part of + ``CLOUD_BIGTABLE`` system. (user_specified_type) + + Attributes: + cloud_bigtable_cluster_specs (MutableSequence[google.cloud.datacatalog_v1.types.CloudBigtableInstanceSpec.CloudBigtableClusterSpec]): + The list of clusters for the Instance. + """ + + class CloudBigtableClusterSpec(proto.Message): + r"""Spec that applies to clusters of an Instance of Cloud + Bigtable. + + Attributes: + display_name (str): + Name of the cluster. + location (str): + Location of the cluster, typically a Cloud + zone. + type_ (str): + Type of the resource. 
For a cluster this + would be "CLUSTER". + linked_resource (str): + A link back to the parent resource, in this + case Instance. + """ + + display_name: str = proto.Field( + proto.STRING, + number=1, + ) + location: str = proto.Field( + proto.STRING, + number=2, + ) + type_: str = proto.Field( + proto.STRING, + number=3, + ) + linked_resource: str = proto.Field( + proto.STRING, + number=4, + ) + + cloud_bigtable_cluster_specs: MutableSequence[ + CloudBigtableClusterSpec + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=CloudBigtableClusterSpec, + ) + + +class ServiceSpec(proto.Message): + r"""Specification that applies to a Service resource. Valid only for + entries with the ``SERVICE`` type. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + cloud_bigtable_instance_spec (google.cloud.datacatalog_v1.types.CloudBigtableInstanceSpec): + Specification that applies to Instance entries of + ``CLOUD_BIGTABLE`` system. + + This field is a member of `oneof`_ ``system_spec``. + """ + + cloud_bigtable_instance_spec: "CloudBigtableInstanceSpec" = proto.Field( + proto.MESSAGE, + number=1, + oneof="system_spec", + message="CloudBigtableInstanceSpec", + ) + + class BusinessContext(proto.Message): r"""Business Context of the entry. @@ -2175,6 +2307,10 @@ class ImportEntriesRequest(proto.Message): a dump ready for ingestion. This field is a member of `oneof`_ ``source``. + job_id (str): + Optional. (Optional) Dataplex task job id, if + specified will be used as part of ImportEntries + LRO ID """ parent: str = proto.Field( @@ -2186,6 +2322,10 @@ class ImportEntriesRequest(proto.Message): number=2, oneof="source", ) + job_id: str = proto.Field( + proto.STRING, + number=3, + ) class ImportEntriesResponse(proto.Message): diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/types/dump_content.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/types/dump_content.py index c31d8208def0..051e883750a5 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/types/dump_content.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/types/dump_content.py @@ -43,13 +43,13 @@ class TaggedEntry(proto.Message): This field is a member of `oneof`_ ``entry``. present_tags (MutableSequence[google.cloud.datacatalog_v1.types.Tag]): - Tags that should be ingested into the Data - Catalog. Caller should populate template name, - column and fields. + Optional. Tags that should be ingested into + the Data Catalog. Caller should populate + template name, column and fields. absent_tags (MutableSequence[google.cloud.datacatalog_v1.types.Tag]): - Tags that should be deleted from the Data - Catalog. Caller should populate template name - and column only. + Optional. Tags that should be deleted from + the Data Catalog. Caller should populate + template name and column only. """ v1_entry: datacatalog.Entry = proto.Field( diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/types/policytagmanager.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/types/policytagmanager.py index ab5213ecfa2f..1a80cbf4d389 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/types/policytagmanager.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/types/policytagmanager.py @@ -128,7 +128,7 @@ class Service(proto.Message): name (google.cloud.datacatalog_v1.types.ManagingSystem): The Google Cloud service name. 
identity (str): - P4SA Identity of the service. + The service agent for the service. """ name: common.ManagingSystem = proto.Field( diff --git a/packages/google-cloud-datacatalog/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.v1.json b/packages/google-cloud-datacatalog/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.v1.json index 723626783764..1659a652e62c 100644 --- a/packages/google-cloud-datacatalog/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.v1.json +++ b/packages/google-cloud-datacatalog/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-datacatalog", - "version": "3.12.0" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-datacatalog/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.v1beta1.json b/packages/google-cloud-datacatalog/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.v1beta1.json index cc4fdfd49064..d7f3d15f70fa 100644 --- a/packages/google-cloud-datacatalog/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.v1beta1.json +++ b/packages/google-cloud-datacatalog/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-datacatalog", - "version": "3.12.0" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-datacatalog/scripts/fixup_datacatalog_v1_keywords.py b/packages/google-cloud-datacatalog/scripts/fixup_datacatalog_v1_keywords.py index 227703f8b304..1fd1bda32e96 100644 --- a/packages/google-cloud-datacatalog/scripts/fixup_datacatalog_v1_keywords.py +++ b/packages/google-cloud-datacatalog/scripts/fixup_datacatalog_v1_keywords.py @@ -60,14 +60,14 @@ class datacatalogCallTransformer(cst.CSTTransformer): 'get_policy_tag': ('name', ), 'get_tag_template': ('name', ), 'get_taxonomy': ('name', ), - 'import_entries': ('parent', 'gcs_bucket_path', ), + 'import_entries': ('parent', 'gcs_bucket_path', 'job_id', ), 'import_taxonomies': ('parent', 'inline_source', 'cross_regional_source', ), 'list_entries': ('parent', 'page_size', 'page_token', 'read_mask', ), 'list_entry_groups': ('parent', 'page_size', 'page_token', ), 'list_policy_tags': ('parent', 'page_size', 'page_token', ), 'list_tags': ('parent', 'page_size', 'page_token', ), 'list_taxonomies': ('parent', 'page_size', 'page_token', 'filter', ), - 'lookup_entry': ('linked_resource', 'sql_resource', 'fully_qualified_name', ), + 'lookup_entry': ('linked_resource', 'sql_resource', 'fully_qualified_name', 'project', 'location', ), 'modify_entry_contacts': ('name', 'contacts', ), 'modify_entry_overview': ('name', 'entry_overview', ), 'reconcile_tags': ('parent', 'tag_template', 'force_delete_missing', 'tags', ), diff --git a/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1/test_data_catalog.py b/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1/test_data_catalog.py index 6104a3f05464..12cebb9ad1bb 100644 --- a/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1/test_data_catalog.py +++ b/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1/test_data_catalog.py @@ -708,6 +708,7 @@ def test_search_catalog(request_type, transport: str = "grpc"): with mock.patch.object(type(client.transport.search_catalog), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = datacatalog.SearchCatalogResponse( + total_size=1086, next_page_token="next_page_token_value", unreachable=["unreachable_value"], ) @@ -720,6 +721,7 @@ def test_search_catalog(request_type, transport: str = "grpc"): # Establish that the response is the type that we expect. assert isinstance(response, pagers.SearchCatalogPager) + assert response.total_size == 1086 assert response.next_page_token == "next_page_token_value" assert response.unreachable == ["unreachable_value"] @@ -758,6 +760,7 @@ async def test_search_catalog_async( # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( datacatalog.SearchCatalogResponse( + total_size=1086, next_page_token="next_page_token_value", unreachable=["unreachable_value"], ) @@ -771,6 +774,7 @@ async def test_search_catalog_async( # Establish that the response is the type that we expect. assert isinstance(response, pagers.SearchCatalogAsyncPager) + assert response.total_size == 1086 assert response.next_page_token == "next_page_token_value" assert response.unreachable == ["unreachable_value"]
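Usage notes on the changes above follow; they are illustrative sketches, not part of the generated code.

The new CloudBigtableSystemSpec, CloudBigtableInstanceSpec, and ServiceSpec messages, plus the project and location fields on LookupEntryRequest, are populated by Data Catalog on entries synced from Cloud Bigtable rather than set directly by most callers. A minimal read-side sketch; the fully qualified name, project, and location are placeholders, and the FQN format shown is an assumption::

    from google.cloud import datacatalog_v1

    client = datacatalog_v1.DataCatalogClient()

    # project/location are the new LookupEntryRequest fields added in this change;
    # per the docstring they are required when the FQN belongs to a system other
    # than DPMS or Dataplex.
    request = datacatalog_v1.LookupEntryRequest(
        fully_qualified_name="bigtable:my-project.us-central1.my-instance",  # placeholder FQN
        project="my-project",
        location="us-central1",
    )
    entry = client.lookup_entry(request=request)

    # For an instance entry synced from Cloud Bigtable, the new oneof fields carry
    # the instance display name and the per-cluster details.
    if entry.cloud_bigtable_system_spec.instance_display_name:
        print("instance:", entry.cloud_bigtable_system_spec.instance_display_name)
    for cluster in entry.service_spec.cloud_bigtable_instance_spec.cloud_bigtable_cluster_specs:
        print(cluster.display_name, cluster.location, cluster.type_, cluster.linked_resource)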
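SearchCatalogResponse gains a total_size field (see the datacatalog.py and test hunks above), and the search pager exposes it alongside the paged results. A small sketch, with the project ID as a placeholder::

    from google.cloud import datacatalog_v1

    client = datacatalog_v1.DataCatalogClient()

    scope = datacatalog_v1.SearchCatalogRequest.Scope(include_project_ids=["my-project"])
    results = client.search_catalog(scope=scope, query="system=bigquery")

    # total_size is the approximate number of entries matched by the query,
    # independent of how many results the current page holds.
    print("approximate matches:", results.total_size)
    for result in results:
        print(result.relative_resource_name)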
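ImportEntriesRequest now accepts an optional job_id (also reflected in fixup_datacatalog_v1_keywords.py); when set, it becomes part of the ImportEntries long-running operation ID. A sketch, with the entry group, bucket path, and job ID as placeholders::

    from google.cloud import datacatalog_v1

    client = datacatalog_v1.DataCatalogClient()

    request = datacatalog_v1.ImportEntriesRequest(
        parent="projects/my-project/locations/us-central1/entryGroups/my_group",
        gcs_bucket_path="gs://my-bucket/dump",  # placeholder dump location
        job_id="my-dataplex-task-job",          # new optional field in this change
    )
    operation = client.import_entries(request=request)

    # Block until the LRO completes and print the ImportEntriesResponse.
    print(operation.result())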
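Note on the retry hunks: the generated async client and transport no longer attach the built-in Retry (backoff on ServiceUnavailable with a 60 s deadline) or the 60 s default timeout to search_catalog, get_entry, lookup_entry, list_entries, and the other affected methods; they now pass default_timeout=None. A caller who wants the previous behaviour can pass an explicit retry and timeout per call, mirroring the removed defaults; the entry name below is a placeholder::

    from google.api_core import exceptions as core_exceptions
    from google.api_core import retry as retries
    from google.cloud import datacatalog_v1

    client = datacatalog_v1.DataCatalogClient()

    # Same policy the removed defaults applied: exponential backoff on
    # ServiceUnavailable, capped at a 60-second deadline.
    retry = retries.Retry(
        initial=0.1,
        maximum=60.0,
        multiplier=1.3,
        predicate=retries.if_exception_type(core_exceptions.ServiceUnavailable),
        deadline=60.0,
    )

    entry = client.get_entry(
        name="projects/my-project/locations/us-central1/entryGroups/my_group/entries/my_entry",
        retry=retry,
        timeout=60.0,
    )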